diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc new file mode 100644 index 000000000..56278f63e --- /dev/null +++ b/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/compute/__init__.py + google/cloud/compute/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 new file mode 100644 index 000000000..29227d4cf --- /dev/null +++ b/owl-bot-staging/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 000000000..ac59f919b --- /dev/null +++ b/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/compute *.py +recursive-include google/cloud/compute_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst new file mode 100644 index 000000000..f6373e2cb --- /dev/null +++ b/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Compute API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Compute API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/_static/custom.css b/owl-bot-staging/v1/docs/_static/custom.css new file mode 100644 index 000000000..06423be0b --- /dev/null +++ b/owl-bot-staging/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/v1/docs/compute_v1/accelerator_types.rst b/owl-bot-staging/v1/docs/compute_v1/accelerator_types.rst new file mode 100644 index 000000000..f865b6225 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/accelerator_types.rst @@ -0,0 +1,10 @@ +AcceleratorTypes +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.accelerator_types + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.accelerator_types.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/addresses.rst b/owl-bot-staging/v1/docs/compute_v1/addresses.rst new file mode 100644 index 000000000..cb88b008e --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/addresses.rst @@ -0,0 +1,10 @@ +Addresses +--------------------------- + +.. automodule:: google.cloud.compute_v1.services.addresses + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.addresses.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/autoscalers.rst b/owl-bot-staging/v1/docs/compute_v1/autoscalers.rst new file mode 100644 index 000000000..59e44e672 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/autoscalers.rst @@ -0,0 +1,10 @@ +Autoscalers +----------------------------- + +.. automodule:: google.cloud.compute_v1.services.autoscalers + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.autoscalers.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/backend_buckets.rst b/owl-bot-staging/v1/docs/compute_v1/backend_buckets.rst new file mode 100644 index 000000000..2a8d5210c --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/backend_buckets.rst @@ -0,0 +1,10 @@ +BackendBuckets +-------------------------------- + +.. automodule:: google.cloud.compute_v1.services.backend_buckets + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.backend_buckets.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/backend_services.rst b/owl-bot-staging/v1/docs/compute_v1/backend_services.rst new file mode 100644 index 000000000..80a321599 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/backend_services.rst @@ -0,0 +1,10 @@ +BackendServices +--------------------------------- + +.. automodule:: google.cloud.compute_v1.services.backend_services + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.backend_services.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/disk_types.rst b/owl-bot-staging/v1/docs/compute_v1/disk_types.rst new file mode 100644 index 000000000..be3cfb6c3 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/disk_types.rst @@ -0,0 +1,10 @@ +DiskTypes +--------------------------- + +.. automodule:: google.cloud.compute_v1.services.disk_types + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.disk_types.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/disks.rst b/owl-bot-staging/v1/docs/compute_v1/disks.rst new file mode 100644 index 000000000..898c491c2 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/disks.rst @@ -0,0 +1,10 @@ +Disks +----------------------- + +.. 
automodule:: google.cloud.compute_v1.services.disks + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.disks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/external_vpn_gateways.rst b/owl-bot-staging/v1/docs/compute_v1/external_vpn_gateways.rst new file mode 100644 index 000000000..804891507 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/external_vpn_gateways.rst @@ -0,0 +1,10 @@ +ExternalVpnGateways +------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.external_vpn_gateways + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.external_vpn_gateways.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/firewall_policies.rst b/owl-bot-staging/v1/docs/compute_v1/firewall_policies.rst new file mode 100644 index 000000000..2d4fbd9ff --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/firewall_policies.rst @@ -0,0 +1,10 @@ +FirewallPolicies +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.firewall_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.firewall_policies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/firewalls.rst b/owl-bot-staging/v1/docs/compute_v1/firewalls.rst new file mode 100644 index 000000000..24448c98b --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/firewalls.rst @@ -0,0 +1,10 @@ +Firewalls +--------------------------- + +.. automodule:: google.cloud.compute_v1.services.firewalls + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.firewalls.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/forwarding_rules.rst b/owl-bot-staging/v1/docs/compute_v1/forwarding_rules.rst new file mode 100644 index 000000000..6a808d3fc --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/forwarding_rules.rst @@ -0,0 +1,10 @@ +ForwardingRules +--------------------------------- + +.. automodule:: google.cloud.compute_v1.services.forwarding_rules + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.forwarding_rules.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/global_addresses.rst b/owl-bot-staging/v1/docs/compute_v1/global_addresses.rst new file mode 100644 index 000000000..d106676a0 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/global_addresses.rst @@ -0,0 +1,10 @@ +GlobalAddresses +--------------------------------- + +.. automodule:: google.cloud.compute_v1.services.global_addresses + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.global_addresses.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/global_forwarding_rules.rst b/owl-bot-staging/v1/docs/compute_v1/global_forwarding_rules.rst new file mode 100644 index 000000000..710d24590 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/global_forwarding_rules.rst @@ -0,0 +1,10 @@ +GlobalForwardingRules +--------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.global_forwarding_rules + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.global_forwarding_rules.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/global_network_endpoint_groups.rst b/owl-bot-staging/v1/docs/compute_v1/global_network_endpoint_groups.rst new file mode 100644 index 000000000..a13a31a67 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/global_network_endpoint_groups.rst @@ -0,0 +1,10 @@ +GlobalNetworkEndpointGroups +--------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.global_network_endpoint_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.global_network_endpoint_groups.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/global_operations.rst b/owl-bot-staging/v1/docs/compute_v1/global_operations.rst new file mode 100644 index 000000000..94450a7ee --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/global_operations.rst @@ -0,0 +1,10 @@ +GlobalOperations +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.global_operations + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.global_operations.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/global_organization_operations.rst b/owl-bot-staging/v1/docs/compute_v1/global_organization_operations.rst new file mode 100644 index 000000000..d4e514357 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/global_organization_operations.rst @@ -0,0 +1,10 @@ +GlobalOrganizationOperations +---------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.global_organization_operations + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.global_organization_operations.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/global_public_delegated_prefixes.rst b/owl-bot-staging/v1/docs/compute_v1/global_public_delegated_prefixes.rst new file mode 100644 index 000000000..c712f7473 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/global_public_delegated_prefixes.rst @@ -0,0 +1,10 @@ +GlobalPublicDelegatedPrefixes +----------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.global_public_delegated_prefixes + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.global_public_delegated_prefixes.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/health_checks.rst b/owl-bot-staging/v1/docs/compute_v1/health_checks.rst new file mode 100644 index 000000000..4f6fbf590 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/health_checks.rst @@ -0,0 +1,10 @@ +HealthChecks +------------------------------ + +.. automodule:: google.cloud.compute_v1.services.health_checks + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.health_checks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/image_family_views.rst b/owl-bot-staging/v1/docs/compute_v1/image_family_views.rst new file mode 100644 index 000000000..40c73b6a1 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/image_family_views.rst @@ -0,0 +1,6 @@ +ImageFamilyViews +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.image_family_views + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/images.rst b/owl-bot-staging/v1/docs/compute_v1/images.rst new file mode 100644 index 000000000..a128da7e5 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/images.rst @@ -0,0 +1,10 @@ +Images +------------------------ + +.. 
automodule:: google.cloud.compute_v1.services.images + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.images.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/instance_group_managers.rst b/owl-bot-staging/v1/docs/compute_v1/instance_group_managers.rst new file mode 100644 index 000000000..eec48ff06 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/instance_group_managers.rst @@ -0,0 +1,10 @@ +InstanceGroupManagers +--------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.instance_group_managers + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.instance_group_managers.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/instance_groups.rst b/owl-bot-staging/v1/docs/compute_v1/instance_groups.rst new file mode 100644 index 000000000..30ccb2bcb --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/instance_groups.rst @@ -0,0 +1,10 @@ +InstanceGroups +-------------------------------- + +.. automodule:: google.cloud.compute_v1.services.instance_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.instance_groups.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/instance_templates.rst b/owl-bot-staging/v1/docs/compute_v1/instance_templates.rst new file mode 100644 index 000000000..1e04745dc --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/instance_templates.rst @@ -0,0 +1,10 @@ +InstanceTemplates +----------------------------------- + +.. automodule:: google.cloud.compute_v1.services.instance_templates + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.instance_templates.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/instances.rst b/owl-bot-staging/v1/docs/compute_v1/instances.rst new file mode 100644 index 000000000..ff79fdecc --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/instances.rst @@ -0,0 +1,10 @@ +Instances +--------------------------- + +.. automodule:: google.cloud.compute_v1.services.instances + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.instances.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/interconnect_attachments.rst b/owl-bot-staging/v1/docs/compute_v1/interconnect_attachments.rst new file mode 100644 index 000000000..6c6d6e907 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/interconnect_attachments.rst @@ -0,0 +1,10 @@ +InterconnectAttachments +----------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.interconnect_attachments + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.interconnect_attachments.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/interconnect_locations.rst b/owl-bot-staging/v1/docs/compute_v1/interconnect_locations.rst new file mode 100644 index 000000000..ed94bf8b0 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/interconnect_locations.rst @@ -0,0 +1,10 @@ +InterconnectLocations +--------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.interconnect_locations + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.interconnect_locations.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/interconnect_remote_locations.rst b/owl-bot-staging/v1/docs/compute_v1/interconnect_remote_locations.rst new file mode 100644 index 000000000..0c749e4e8 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/interconnect_remote_locations.rst @@ -0,0 +1,10 @@ +InterconnectRemoteLocations +--------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.interconnect_remote_locations + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.interconnect_remote_locations.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/interconnects.rst b/owl-bot-staging/v1/docs/compute_v1/interconnects.rst new file mode 100644 index 000000000..810de28ad --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/interconnects.rst @@ -0,0 +1,10 @@ +Interconnects +------------------------------- + +.. automodule:: google.cloud.compute_v1.services.interconnects + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.interconnects.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/license_codes.rst b/owl-bot-staging/v1/docs/compute_v1/license_codes.rst new file mode 100644 index 000000000..88fe1b72e --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/license_codes.rst @@ -0,0 +1,6 @@ +LicenseCodes +------------------------------ + +.. automodule:: google.cloud.compute_v1.services.license_codes + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/licenses.rst b/owl-bot-staging/v1/docs/compute_v1/licenses.rst new file mode 100644 index 000000000..1782e396a --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/licenses.rst @@ -0,0 +1,10 @@ +Licenses +-------------------------- + +.. 
automodule:: google.cloud.compute_v1.services.licenses + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.licenses.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/machine_images.rst b/owl-bot-staging/v1/docs/compute_v1/machine_images.rst new file mode 100644 index 000000000..663ecb04b --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/machine_images.rst @@ -0,0 +1,10 @@ +MachineImages +------------------------------- + +.. automodule:: google.cloud.compute_v1.services.machine_images + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.machine_images.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/machine_types.rst b/owl-bot-staging/v1/docs/compute_v1/machine_types.rst new file mode 100644 index 000000000..9c536e008 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/machine_types.rst @@ -0,0 +1,10 @@ +MachineTypes +------------------------------ + +.. automodule:: google.cloud.compute_v1.services.machine_types + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.machine_types.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/network_attachments.rst b/owl-bot-staging/v1/docs/compute_v1/network_attachments.rst new file mode 100644 index 000000000..bd85bb34d --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/network_attachments.rst @@ -0,0 +1,10 @@ +NetworkAttachments +------------------------------------ + +.. automodule:: google.cloud.compute_v1.services.network_attachments + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.network_attachments.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/network_edge_security_services.rst b/owl-bot-staging/v1/docs/compute_v1/network_edge_security_services.rst new file mode 100644 index 000000000..d0bb5841f --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/network_edge_security_services.rst @@ -0,0 +1,10 @@ +NetworkEdgeSecurityServices +--------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.network_edge_security_services + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.network_edge_security_services.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/network_endpoint_groups.rst b/owl-bot-staging/v1/docs/compute_v1/network_endpoint_groups.rst new file mode 100644 index 000000000..0a929be61 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/network_endpoint_groups.rst @@ -0,0 +1,10 @@ +NetworkEndpointGroups +--------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.network_endpoint_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.network_endpoint_groups.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/network_firewall_policies.rst b/owl-bot-staging/v1/docs/compute_v1/network_firewall_policies.rst new file mode 100644 index 000000000..c3fddc831 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/network_firewall_policies.rst @@ -0,0 +1,10 @@ +NetworkFirewallPolicies +----------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.network_firewall_policies + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.network_firewall_policies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/networks.rst b/owl-bot-staging/v1/docs/compute_v1/networks.rst new file mode 100644 index 000000000..2a74f4714 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/networks.rst @@ -0,0 +1,10 @@ +Networks +-------------------------- + +.. automodule:: google.cloud.compute_v1.services.networks + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.networks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/node_groups.rst b/owl-bot-staging/v1/docs/compute_v1/node_groups.rst new file mode 100644 index 000000000..ee5f0b254 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/node_groups.rst @@ -0,0 +1,10 @@ +NodeGroups +---------------------------- + +.. automodule:: google.cloud.compute_v1.services.node_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.node_groups.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/node_templates.rst b/owl-bot-staging/v1/docs/compute_v1/node_templates.rst new file mode 100644 index 000000000..1cd30fb6c --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/node_templates.rst @@ -0,0 +1,10 @@ +NodeTemplates +------------------------------- + +.. automodule:: google.cloud.compute_v1.services.node_templates + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.node_templates.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/node_types.rst b/owl-bot-staging/v1/docs/compute_v1/node_types.rst new file mode 100644 index 000000000..4e765f36f --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/node_types.rst @@ -0,0 +1,10 @@ +NodeTypes +--------------------------- + +.. 
automodule:: google.cloud.compute_v1.services.node_types + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.node_types.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/packet_mirrorings.rst b/owl-bot-staging/v1/docs/compute_v1/packet_mirrorings.rst new file mode 100644 index 000000000..1fce6caed --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/packet_mirrorings.rst @@ -0,0 +1,10 @@ +PacketMirrorings +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.packet_mirrorings + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.packet_mirrorings.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/projects.rst b/owl-bot-staging/v1/docs/compute_v1/projects.rst new file mode 100644 index 000000000..5e076fa50 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/projects.rst @@ -0,0 +1,10 @@ +Projects +-------------------------- + +.. automodule:: google.cloud.compute_v1.services.projects + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.projects.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/public_advertised_prefixes.rst b/owl-bot-staging/v1/docs/compute_v1/public_advertised_prefixes.rst new file mode 100644 index 000000000..54c433688 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/public_advertised_prefixes.rst @@ -0,0 +1,10 @@ +PublicAdvertisedPrefixes +------------------------------------------ + +.. automodule:: google.cloud.compute_v1.services.public_advertised_prefixes + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.public_advertised_prefixes.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/public_delegated_prefixes.rst b/owl-bot-staging/v1/docs/compute_v1/public_delegated_prefixes.rst new file mode 100644 index 000000000..798744764 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/public_delegated_prefixes.rst @@ -0,0 +1,10 @@ +PublicDelegatedPrefixes +----------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.public_delegated_prefixes + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.public_delegated_prefixes.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_autoscalers.rst b/owl-bot-staging/v1/docs/compute_v1/region_autoscalers.rst new file mode 100644 index 000000000..cb7ace393 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_autoscalers.rst @@ -0,0 +1,10 @@ +RegionAutoscalers +----------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_autoscalers + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_autoscalers.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_backend_services.rst b/owl-bot-staging/v1/docs/compute_v1/region_backend_services.rst new file mode 100644 index 000000000..1fcbe4028 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_backend_services.rst @@ -0,0 +1,10 @@ +RegionBackendServices +--------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_backend_services + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.region_backend_services.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_commitments.rst b/owl-bot-staging/v1/docs/compute_v1/region_commitments.rst new file mode 100644 index 000000000..f88e46dc1 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_commitments.rst @@ -0,0 +1,10 @@ +RegionCommitments +----------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_commitments + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_commitments.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_disk_types.rst b/owl-bot-staging/v1/docs/compute_v1/region_disk_types.rst new file mode 100644 index 000000000..16a33bc3e --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_disk_types.rst @@ -0,0 +1,10 @@ +RegionDiskTypes +--------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_disk_types + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_disk_types.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_disks.rst b/owl-bot-staging/v1/docs/compute_v1/region_disks.rst new file mode 100644 index 000000000..db87ad4b7 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_disks.rst @@ -0,0 +1,10 @@ +RegionDisks +----------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_disks + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.region_disks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_health_check_services.rst b/owl-bot-staging/v1/docs/compute_v1/region_health_check_services.rst new file mode 100644 index 000000000..2d28e52c1 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_health_check_services.rst @@ -0,0 +1,10 @@ +RegionHealthCheckServices +------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_health_check_services + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_health_check_services.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_health_checks.rst b/owl-bot-staging/v1/docs/compute_v1/region_health_checks.rst new file mode 100644 index 000000000..296eab2c4 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_health_checks.rst @@ -0,0 +1,10 @@ +RegionHealthChecks +------------------------------------ + +.. automodule:: google.cloud.compute_v1.services.region_health_checks + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_health_checks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_instance_group_managers.rst b/owl-bot-staging/v1/docs/compute_v1/region_instance_group_managers.rst new file mode 100644 index 000000000..a6caf22fb --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_instance_group_managers.rst @@ -0,0 +1,10 @@ +RegionInstanceGroupManagers +--------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_instance_group_managers + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.region_instance_group_managers.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_instance_groups.rst b/owl-bot-staging/v1/docs/compute_v1/region_instance_groups.rst new file mode 100644 index 000000000..83ddb95ab --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_instance_groups.rst @@ -0,0 +1,10 @@ +RegionInstanceGroups +-------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_instance_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_instance_groups.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_instance_templates.rst b/owl-bot-staging/v1/docs/compute_v1/region_instance_templates.rst new file mode 100644 index 000000000..bbeffb6e2 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_instance_templates.rst @@ -0,0 +1,10 @@ +RegionInstanceTemplates +----------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_instance_templates + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_instance_templates.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_instances.rst b/owl-bot-staging/v1/docs/compute_v1/region_instances.rst new file mode 100644 index 000000000..780a33cd9 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_instances.rst @@ -0,0 +1,6 @@ +RegionInstances +--------------------------------- + +.. 
automodule:: google.cloud.compute_v1.services.region_instances + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_network_endpoint_groups.rst b/owl-bot-staging/v1/docs/compute_v1/region_network_endpoint_groups.rst new file mode 100644 index 000000000..5c386c6fd --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_network_endpoint_groups.rst @@ -0,0 +1,10 @@ +RegionNetworkEndpointGroups +--------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_network_endpoint_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_network_endpoint_groups.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_network_firewall_policies.rst b/owl-bot-staging/v1/docs/compute_v1/region_network_firewall_policies.rst new file mode 100644 index 000000000..4cfb4d6d5 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_network_firewall_policies.rst @@ -0,0 +1,10 @@ +RegionNetworkFirewallPolicies +----------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_network_firewall_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_network_firewall_policies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_notification_endpoints.rst b/owl-bot-staging/v1/docs/compute_v1/region_notification_endpoints.rst new file mode 100644 index 000000000..b69fe5d72 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_notification_endpoints.rst @@ -0,0 +1,10 @@ +RegionNotificationEndpoints +--------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_notification_endpoints + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.region_notification_endpoints.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_operations.rst b/owl-bot-staging/v1/docs/compute_v1/region_operations.rst new file mode 100644 index 000000000..df407212c --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_operations.rst @@ -0,0 +1,10 @@ +RegionOperations +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_operations + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_operations.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_security_policies.rst b/owl-bot-staging/v1/docs/compute_v1/region_security_policies.rst new file mode 100644 index 000000000..5df2bfdce --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_security_policies.rst @@ -0,0 +1,10 @@ +RegionSecurityPolicies +---------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_security_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_security_policies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_ssl_certificates.rst b/owl-bot-staging/v1/docs/compute_v1/region_ssl_certificates.rst new file mode 100644 index 000000000..e12d89e5d --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_ssl_certificates.rst @@ -0,0 +1,10 @@ +RegionSslCertificates +--------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_ssl_certificates + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.region_ssl_certificates.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_ssl_policies.rst b/owl-bot-staging/v1/docs/compute_v1/region_ssl_policies.rst new file mode 100644 index 000000000..068928fe2 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_ssl_policies.rst @@ -0,0 +1,10 @@ +RegionSslPolicies +----------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_ssl_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_ssl_policies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_target_http_proxies.rst b/owl-bot-staging/v1/docs/compute_v1/region_target_http_proxies.rst new file mode 100644 index 000000000..187504997 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_target_http_proxies.rst @@ -0,0 +1,10 @@ +RegionTargetHttpProxies +----------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_target_http_proxies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_target_http_proxies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_target_https_proxies.rst b/owl-bot-staging/v1/docs/compute_v1/region_target_https_proxies.rst new file mode 100644 index 000000000..32ad2918d --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_target_https_proxies.rst @@ -0,0 +1,10 @@ +RegionTargetHttpsProxies +------------------------------------------ + +.. automodule:: google.cloud.compute_v1.services.region_target_https_proxies + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.region_target_https_proxies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_target_tcp_proxies.rst b/owl-bot-staging/v1/docs/compute_v1/region_target_tcp_proxies.rst new file mode 100644 index 000000000..9c4814414 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_target_tcp_proxies.rst @@ -0,0 +1,10 @@ +RegionTargetTcpProxies +---------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_target_tcp_proxies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_target_tcp_proxies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/region_url_maps.rst b/owl-bot-staging/v1/docs/compute_v1/region_url_maps.rst new file mode 100644 index 000000000..204cdf69b --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/region_url_maps.rst @@ -0,0 +1,10 @@ +RegionUrlMaps +------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_url_maps + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_url_maps.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/regions.rst b/owl-bot-staging/v1/docs/compute_v1/regions.rst new file mode 100644 index 000000000..4beda5440 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/regions.rst @@ -0,0 +1,10 @@ +Regions +------------------------- + +.. automodule:: google.cloud.compute_v1.services.regions + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.regions.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/reservations.rst b/owl-bot-staging/v1/docs/compute_v1/reservations.rst new file mode 100644 index 000000000..adfa21b2c --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/reservations.rst @@ -0,0 +1,10 @@ +Reservations +------------------------------ + +.. automodule:: google.cloud.compute_v1.services.reservations + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.reservations.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/resource_policies.rst b/owl-bot-staging/v1/docs/compute_v1/resource_policies.rst new file mode 100644 index 000000000..39e2bab0e --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/resource_policies.rst @@ -0,0 +1,10 @@ +ResourcePolicies +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.resource_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.resource_policies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/routers.rst b/owl-bot-staging/v1/docs/compute_v1/routers.rst new file mode 100644 index 000000000..0a82b2235 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/routers.rst @@ -0,0 +1,10 @@ +Routers +------------------------- + +.. automodule:: google.cloud.compute_v1.services.routers + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.routers.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/routes.rst b/owl-bot-staging/v1/docs/compute_v1/routes.rst new file mode 100644 index 000000000..d03cd7e37 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/routes.rst @@ -0,0 +1,10 @@ +Routes +------------------------ + +.. automodule:: google.cloud.compute_v1.services.routes + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.routes.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/security_policies.rst b/owl-bot-staging/v1/docs/compute_v1/security_policies.rst new file mode 100644 index 000000000..a2893695b --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/security_policies.rst @@ -0,0 +1,10 @@ +SecurityPolicies +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.security_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.security_policies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/service_attachments.rst b/owl-bot-staging/v1/docs/compute_v1/service_attachments.rst new file mode 100644 index 000000000..07c4e4699 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/service_attachments.rst @@ -0,0 +1,10 @@ +ServiceAttachments +------------------------------------ + +.. automodule:: google.cloud.compute_v1.services.service_attachments + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.service_attachments.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/services.rst b/owl-bot-staging/v1/docs/compute_v1/services.rst new file mode 100644 index 000000000..92385f571 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/services.rst @@ -0,0 +1,95 @@ +Services for Google Cloud Compute v1 API +======================================== +.. 
toctree:: + :maxdepth: 2 + + accelerator_types + addresses + autoscalers + backend_buckets + backend_services + disks + disk_types + external_vpn_gateways + firewall_policies + firewalls + forwarding_rules + global_addresses + global_forwarding_rules + global_network_endpoint_groups + global_operations + global_organization_operations + global_public_delegated_prefixes + health_checks + image_family_views + images + instance_group_managers + instance_groups + instances + instance_templates + interconnect_attachments + interconnect_locations + interconnect_remote_locations + interconnects + license_codes + licenses + machine_images + machine_types + network_attachments + network_edge_security_services + network_endpoint_groups + network_firewall_policies + networks + node_groups + node_templates + node_types + packet_mirrorings + projects + public_advertised_prefixes + public_delegated_prefixes + region_autoscalers + region_backend_services + region_commitments + region_disks + region_disk_types + region_health_checks + region_health_check_services + region_instance_group_managers + region_instance_groups + region_instances + region_instance_templates + region_network_endpoint_groups + region_network_firewall_policies + region_notification_endpoints + region_operations + regions + region_security_policies + region_ssl_certificates + region_ssl_policies + region_target_http_proxies + region_target_https_proxies + region_target_tcp_proxies + region_url_maps + reservations + resource_policies + routers + routes + security_policies + service_attachments + snapshots + ssl_certificates + ssl_policies + subnetworks + target_grpc_proxies + target_http_proxies + target_https_proxies + target_instances + target_pools + target_ssl_proxies + target_tcp_proxies + target_vpn_gateways + url_maps + vpn_gateways + vpn_tunnels + zone_operations + zones diff --git a/owl-bot-staging/v1/docs/compute_v1/snapshots.rst b/owl-bot-staging/v1/docs/compute_v1/snapshots.rst new file mode 100644 
index 000000000..a29a1aa93 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/snapshots.rst @@ -0,0 +1,10 @@ +Snapshots +--------------------------- + +.. automodule:: google.cloud.compute_v1.services.snapshots + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.snapshots.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/ssl_certificates.rst b/owl-bot-staging/v1/docs/compute_v1/ssl_certificates.rst new file mode 100644 index 000000000..e90c34ddd --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/ssl_certificates.rst @@ -0,0 +1,10 @@ +SslCertificates +--------------------------------- + +.. automodule:: google.cloud.compute_v1.services.ssl_certificates + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.ssl_certificates.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/ssl_policies.rst b/owl-bot-staging/v1/docs/compute_v1/ssl_policies.rst new file mode 100644 index 000000000..d07c8b443 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/ssl_policies.rst @@ -0,0 +1,10 @@ +SslPolicies +----------------------------- + +.. automodule:: google.cloud.compute_v1.services.ssl_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.ssl_policies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/subnetworks.rst b/owl-bot-staging/v1/docs/compute_v1/subnetworks.rst new file mode 100644 index 000000000..cd20e0933 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/subnetworks.rst @@ -0,0 +1,10 @@ +Subnetworks +----------------------------- + +.. automodule:: google.cloud.compute_v1.services.subnetworks + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.subnetworks.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/target_grpc_proxies.rst b/owl-bot-staging/v1/docs/compute_v1/target_grpc_proxies.rst new file mode 100644 index 000000000..4ddd96105 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/target_grpc_proxies.rst @@ -0,0 +1,10 @@ +TargetGrpcProxies +----------------------------------- + +.. automodule:: google.cloud.compute_v1.services.target_grpc_proxies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.target_grpc_proxies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/target_http_proxies.rst b/owl-bot-staging/v1/docs/compute_v1/target_http_proxies.rst new file mode 100644 index 000000000..8fe97fe26 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/target_http_proxies.rst @@ -0,0 +1,10 @@ +TargetHttpProxies +----------------------------------- + +.. automodule:: google.cloud.compute_v1.services.target_http_proxies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.target_http_proxies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/target_https_proxies.rst b/owl-bot-staging/v1/docs/compute_v1/target_https_proxies.rst new file mode 100644 index 000000000..d3cd4242d --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/target_https_proxies.rst @@ -0,0 +1,10 @@ +TargetHttpsProxies +------------------------------------ + +.. automodule:: google.cloud.compute_v1.services.target_https_proxies + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.target_https_proxies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/target_instances.rst b/owl-bot-staging/v1/docs/compute_v1/target_instances.rst new file mode 100644 index 000000000..9ab124ef9 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/target_instances.rst @@ -0,0 +1,10 @@ +TargetInstances +--------------------------------- + +.. automodule:: google.cloud.compute_v1.services.target_instances + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.target_instances.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/target_pools.rst b/owl-bot-staging/v1/docs/compute_v1/target_pools.rst new file mode 100644 index 000000000..7be339431 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/target_pools.rst @@ -0,0 +1,10 @@ +TargetPools +----------------------------- + +.. automodule:: google.cloud.compute_v1.services.target_pools + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.target_pools.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/target_ssl_proxies.rst b/owl-bot-staging/v1/docs/compute_v1/target_ssl_proxies.rst new file mode 100644 index 000000000..5c39bcf67 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/target_ssl_proxies.rst @@ -0,0 +1,10 @@ +TargetSslProxies +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.target_ssl_proxies + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.target_ssl_proxies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/target_tcp_proxies.rst b/owl-bot-staging/v1/docs/compute_v1/target_tcp_proxies.rst new file mode 100644 index 000000000..940978c3f --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/target_tcp_proxies.rst @@ -0,0 +1,10 @@ +TargetTcpProxies +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.target_tcp_proxies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.target_tcp_proxies.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/target_vpn_gateways.rst b/owl-bot-staging/v1/docs/compute_v1/target_vpn_gateways.rst new file mode 100644 index 000000000..0fb0f169c --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/target_vpn_gateways.rst @@ -0,0 +1,10 @@ +TargetVpnGateways +----------------------------------- + +.. automodule:: google.cloud.compute_v1.services.target_vpn_gateways + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.target_vpn_gateways.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/types.rst b/owl-bot-staging/v1/docs/compute_v1/types.rst new file mode 100644 index 000000000..b922d95ed --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Compute v1 API +===================================== + +.. automodule:: google.cloud.compute_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1/docs/compute_v1/url_maps.rst b/owl-bot-staging/v1/docs/compute_v1/url_maps.rst new file mode 100644 index 000000000..c0494fe72 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/url_maps.rst @@ -0,0 +1,10 @@ +UrlMaps +------------------------- + +.. automodule:: google.cloud.compute_v1.services.url_maps + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.url_maps.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/vpn_gateways.rst b/owl-bot-staging/v1/docs/compute_v1/vpn_gateways.rst new file mode 100644 index 000000000..4313fd9b8 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/vpn_gateways.rst @@ -0,0 +1,10 @@ +VpnGateways +----------------------------- + +.. automodule:: google.cloud.compute_v1.services.vpn_gateways + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.vpn_gateways.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/vpn_tunnels.rst b/owl-bot-staging/v1/docs/compute_v1/vpn_tunnels.rst new file mode 100644 index 000000000..ba0faf8ba --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/vpn_tunnels.rst @@ -0,0 +1,10 @@ +VpnTunnels +---------------------------- + +.. automodule:: google.cloud.compute_v1.services.vpn_tunnels + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.vpn_tunnels.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/zone_operations.rst b/owl-bot-staging/v1/docs/compute_v1/zone_operations.rst new file mode 100644 index 000000000..3ad84fbc8 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/zone_operations.rst @@ -0,0 +1,10 @@ +ZoneOperations +-------------------------------- + +.. automodule:: google.cloud.compute_v1.services.zone_operations + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.zone_operations.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/compute_v1/zones.rst b/owl-bot-staging/v1/docs/compute_v1/zones.rst new file mode 100644 index 000000000..f34131bf8 --- /dev/null +++ b/owl-bot-staging/v1/docs/compute_v1/zones.rst @@ -0,0 +1,10 @@ +Zones +----------------------- + +.. automodule:: google.cloud.compute_v1.services.zones + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.zones.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 000000000..b9116509f --- /dev/null +++ b/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-compute documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-compute" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-compute-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-compute.tex", + u"google-cloud-compute Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-compute", + u"Google Cloud Compute Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-compute", + u"google-cloud-compute Documentation", + author, + "google-cloud-compute", + "GAPIC library for Google Cloud Compute API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 000000000..a84412687 --- /dev/null +++ b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + compute_v1/services + compute_v1/types diff --git a/owl-bot-staging/v1/google/cloud/compute/__init__.py b/owl-bot-staging/v1/google/cloud/compute/__init__.py new file mode 100644 index 000000000..d46ed4caf --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute/__init__.py @@ -0,0 +1,2931 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.compute import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.compute_v1.services.accelerator_types.client import AcceleratorTypesClient +from google.cloud.compute_v1.services.addresses.client import AddressesClient +from google.cloud.compute_v1.services.autoscalers.client import AutoscalersClient +from google.cloud.compute_v1.services.backend_buckets.client import BackendBucketsClient +from google.cloud.compute_v1.services.backend_services.client import BackendServicesClient +from google.cloud.compute_v1.services.disks.client import DisksClient +from google.cloud.compute_v1.services.disk_types.client import DiskTypesClient +from google.cloud.compute_v1.services.external_vpn_gateways.client import ExternalVpnGatewaysClient +from google.cloud.compute_v1.services.firewall_policies.client import FirewallPoliciesClient +from google.cloud.compute_v1.services.firewalls.client import FirewallsClient +from google.cloud.compute_v1.services.forwarding_rules.client import 
ForwardingRulesClient +from google.cloud.compute_v1.services.global_addresses.client import GlobalAddressesClient +from google.cloud.compute_v1.services.global_forwarding_rules.client import GlobalForwardingRulesClient +from google.cloud.compute_v1.services.global_network_endpoint_groups.client import GlobalNetworkEndpointGroupsClient +from google.cloud.compute_v1.services.global_operations.client import GlobalOperationsClient +from google.cloud.compute_v1.services.global_organization_operations.client import GlobalOrganizationOperationsClient +from google.cloud.compute_v1.services.global_public_delegated_prefixes.client import GlobalPublicDelegatedPrefixesClient +from google.cloud.compute_v1.services.health_checks.client import HealthChecksClient +from google.cloud.compute_v1.services.image_family_views.client import ImageFamilyViewsClient +from google.cloud.compute_v1.services.images.client import ImagesClient +from google.cloud.compute_v1.services.instance_group_managers.client import InstanceGroupManagersClient +from google.cloud.compute_v1.services.instance_groups.client import InstanceGroupsClient +from google.cloud.compute_v1.services.instances.client import InstancesClient +from google.cloud.compute_v1.services.instance_templates.client import InstanceTemplatesClient +from google.cloud.compute_v1.services.interconnect_attachments.client import InterconnectAttachmentsClient +from google.cloud.compute_v1.services.interconnect_locations.client import InterconnectLocationsClient +from google.cloud.compute_v1.services.interconnect_remote_locations.client import InterconnectRemoteLocationsClient +from google.cloud.compute_v1.services.interconnects.client import InterconnectsClient +from google.cloud.compute_v1.services.license_codes.client import LicenseCodesClient +from google.cloud.compute_v1.services.licenses.client import LicensesClient +from google.cloud.compute_v1.services.machine_images.client import MachineImagesClient +from 
google.cloud.compute_v1.services.machine_types.client import MachineTypesClient +from google.cloud.compute_v1.services.network_attachments.client import NetworkAttachmentsClient +from google.cloud.compute_v1.services.network_edge_security_services.client import NetworkEdgeSecurityServicesClient +from google.cloud.compute_v1.services.network_endpoint_groups.client import NetworkEndpointGroupsClient +from google.cloud.compute_v1.services.network_firewall_policies.client import NetworkFirewallPoliciesClient +from google.cloud.compute_v1.services.networks.client import NetworksClient +from google.cloud.compute_v1.services.node_groups.client import NodeGroupsClient +from google.cloud.compute_v1.services.node_templates.client import NodeTemplatesClient +from google.cloud.compute_v1.services.node_types.client import NodeTypesClient +from google.cloud.compute_v1.services.packet_mirrorings.client import PacketMirroringsClient +from google.cloud.compute_v1.services.projects.client import ProjectsClient +from google.cloud.compute_v1.services.public_advertised_prefixes.client import PublicAdvertisedPrefixesClient +from google.cloud.compute_v1.services.public_delegated_prefixes.client import PublicDelegatedPrefixesClient +from google.cloud.compute_v1.services.region_autoscalers.client import RegionAutoscalersClient +from google.cloud.compute_v1.services.region_backend_services.client import RegionBackendServicesClient +from google.cloud.compute_v1.services.region_commitments.client import RegionCommitmentsClient +from google.cloud.compute_v1.services.region_disks.client import RegionDisksClient +from google.cloud.compute_v1.services.region_disk_types.client import RegionDiskTypesClient +from google.cloud.compute_v1.services.region_health_checks.client import RegionHealthChecksClient +from google.cloud.compute_v1.services.region_health_check_services.client import RegionHealthCheckServicesClient +from google.cloud.compute_v1.services.region_instance_group_managers.client import 
RegionInstanceGroupManagersClient +from google.cloud.compute_v1.services.region_instance_groups.client import RegionInstanceGroupsClient +from google.cloud.compute_v1.services.region_instances.client import RegionInstancesClient +from google.cloud.compute_v1.services.region_instance_templates.client import RegionInstanceTemplatesClient +from google.cloud.compute_v1.services.region_network_endpoint_groups.client import RegionNetworkEndpointGroupsClient +from google.cloud.compute_v1.services.region_network_firewall_policies.client import RegionNetworkFirewallPoliciesClient +from google.cloud.compute_v1.services.region_notification_endpoints.client import RegionNotificationEndpointsClient +from google.cloud.compute_v1.services.region_operations.client import RegionOperationsClient +from google.cloud.compute_v1.services.regions.client import RegionsClient +from google.cloud.compute_v1.services.region_security_policies.client import RegionSecurityPoliciesClient +from google.cloud.compute_v1.services.region_ssl_certificates.client import RegionSslCertificatesClient +from google.cloud.compute_v1.services.region_ssl_policies.client import RegionSslPoliciesClient +from google.cloud.compute_v1.services.region_target_http_proxies.client import RegionTargetHttpProxiesClient +from google.cloud.compute_v1.services.region_target_https_proxies.client import RegionTargetHttpsProxiesClient +from google.cloud.compute_v1.services.region_target_tcp_proxies.client import RegionTargetTcpProxiesClient +from google.cloud.compute_v1.services.region_url_maps.client import RegionUrlMapsClient +from google.cloud.compute_v1.services.reservations.client import ReservationsClient +from google.cloud.compute_v1.services.resource_policies.client import ResourcePoliciesClient +from google.cloud.compute_v1.services.routers.client import RoutersClient +from google.cloud.compute_v1.services.routes.client import RoutesClient +from google.cloud.compute_v1.services.security_policies.client import 
SecurityPoliciesClient +from google.cloud.compute_v1.services.service_attachments.client import ServiceAttachmentsClient +from google.cloud.compute_v1.services.snapshots.client import SnapshotsClient +from google.cloud.compute_v1.services.ssl_certificates.client import SslCertificatesClient +from google.cloud.compute_v1.services.ssl_policies.client import SslPoliciesClient +from google.cloud.compute_v1.services.subnetworks.client import SubnetworksClient +from google.cloud.compute_v1.services.target_grpc_proxies.client import TargetGrpcProxiesClient +from google.cloud.compute_v1.services.target_http_proxies.client import TargetHttpProxiesClient +from google.cloud.compute_v1.services.target_https_proxies.client import TargetHttpsProxiesClient +from google.cloud.compute_v1.services.target_instances.client import TargetInstancesClient +from google.cloud.compute_v1.services.target_pools.client import TargetPoolsClient +from google.cloud.compute_v1.services.target_ssl_proxies.client import TargetSslProxiesClient +from google.cloud.compute_v1.services.target_tcp_proxies.client import TargetTcpProxiesClient +from google.cloud.compute_v1.services.target_vpn_gateways.client import TargetVpnGatewaysClient +from google.cloud.compute_v1.services.url_maps.client import UrlMapsClient +from google.cloud.compute_v1.services.vpn_gateways.client import VpnGatewaysClient +from google.cloud.compute_v1.services.vpn_tunnels.client import VpnTunnelsClient +from google.cloud.compute_v1.services.zone_operations.client import ZoneOperationsClient +from google.cloud.compute_v1.services.zones.client import ZonesClient + +from google.cloud.compute_v1.types.compute import AbandonInstancesInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import AbandonInstancesRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import AcceleratorConfig +from google.cloud.compute_v1.types.compute import Accelerators +from google.cloud.compute_v1.types.compute import 
AcceleratorType +from google.cloud.compute_v1.types.compute import AcceleratorTypeAggregatedList +from google.cloud.compute_v1.types.compute import AcceleratorTypeList +from google.cloud.compute_v1.types.compute import AcceleratorTypesScopedList +from google.cloud.compute_v1.types.compute import AccessConfig +from google.cloud.compute_v1.types.compute import AddAccessConfigInstanceRequest +from google.cloud.compute_v1.types.compute import AddAssociationFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import AddAssociationNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import AddAssociationRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import AddHealthCheckTargetPoolRequest +from google.cloud.compute_v1.types.compute import AddInstancesInstanceGroupRequest +from google.cloud.compute_v1.types.compute import AddInstanceTargetPoolRequest +from google.cloud.compute_v1.types.compute import AddNodesNodeGroupRequest +from google.cloud.compute_v1.types.compute import AddPeeringNetworkRequest +from google.cloud.compute_v1.types.compute import AddResourcePoliciesDiskRequest +from google.cloud.compute_v1.types.compute import AddResourcePoliciesInstanceRequest +from google.cloud.compute_v1.types.compute import AddResourcePoliciesRegionDiskRequest +from google.cloud.compute_v1.types.compute import Address +from google.cloud.compute_v1.types.compute import AddressAggregatedList +from google.cloud.compute_v1.types.compute import AddressesScopedList +from google.cloud.compute_v1.types.compute import AddressList +from google.cloud.compute_v1.types.compute import AddRuleFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import AddRuleNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import AddRuleRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import AddRuleSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import 
AddSignedUrlKeyBackendBucketRequest +from google.cloud.compute_v1.types.compute import AddSignedUrlKeyBackendServiceRequest +from google.cloud.compute_v1.types.compute import AdvancedMachineFeatures +from google.cloud.compute_v1.types.compute import AggregatedListAcceleratorTypesRequest +from google.cloud.compute_v1.types.compute import AggregatedListAddressesRequest +from google.cloud.compute_v1.types.compute import AggregatedListAutoscalersRequest +from google.cloud.compute_v1.types.compute import AggregatedListBackendServicesRequest +from google.cloud.compute_v1.types.compute import AggregatedListDisksRequest +from google.cloud.compute_v1.types.compute import AggregatedListDiskTypesRequest +from google.cloud.compute_v1.types.compute import AggregatedListForwardingRulesRequest +from google.cloud.compute_v1.types.compute import AggregatedListGlobalOperationsRequest +from google.cloud.compute_v1.types.compute import AggregatedListHealthChecksRequest +from google.cloud.compute_v1.types.compute import AggregatedListInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import AggregatedListInstanceGroupsRequest +from google.cloud.compute_v1.types.compute import AggregatedListInstancesRequest +from google.cloud.compute_v1.types.compute import AggregatedListInstanceTemplatesRequest +from google.cloud.compute_v1.types.compute import AggregatedListInterconnectAttachmentsRequest +from google.cloud.compute_v1.types.compute import AggregatedListMachineTypesRequest +from google.cloud.compute_v1.types.compute import AggregatedListNetworkAttachmentsRequest +from google.cloud.compute_v1.types.compute import AggregatedListNetworkEdgeSecurityServicesRequest +from google.cloud.compute_v1.types.compute import AggregatedListNetworkEndpointGroupsRequest +from google.cloud.compute_v1.types.compute import AggregatedListNodeGroupsRequest +from google.cloud.compute_v1.types.compute import AggregatedListNodeTemplatesRequest +from google.cloud.compute_v1.types.compute 
import AggregatedListNodeTypesRequest +from google.cloud.compute_v1.types.compute import AggregatedListPacketMirroringsRequest +from google.cloud.compute_v1.types.compute import AggregatedListPublicDelegatedPrefixesRequest +from google.cloud.compute_v1.types.compute import AggregatedListRegionCommitmentsRequest +from google.cloud.compute_v1.types.compute import AggregatedListReservationsRequest +from google.cloud.compute_v1.types.compute import AggregatedListResourcePoliciesRequest +from google.cloud.compute_v1.types.compute import AggregatedListRoutersRequest +from google.cloud.compute_v1.types.compute import AggregatedListSecurityPoliciesRequest +from google.cloud.compute_v1.types.compute import AggregatedListServiceAttachmentsRequest +from google.cloud.compute_v1.types.compute import AggregatedListSslCertificatesRequest +from google.cloud.compute_v1.types.compute import AggregatedListSslPoliciesRequest +from google.cloud.compute_v1.types.compute import AggregatedListSubnetworksRequest +from google.cloud.compute_v1.types.compute import AggregatedListTargetHttpProxiesRequest +from google.cloud.compute_v1.types.compute import AggregatedListTargetHttpsProxiesRequest +from google.cloud.compute_v1.types.compute import AggregatedListTargetInstancesRequest +from google.cloud.compute_v1.types.compute import AggregatedListTargetPoolsRequest +from google.cloud.compute_v1.types.compute import AggregatedListTargetTcpProxiesRequest +from google.cloud.compute_v1.types.compute import AggregatedListTargetVpnGatewaysRequest +from google.cloud.compute_v1.types.compute import AggregatedListUrlMapsRequest +from google.cloud.compute_v1.types.compute import AggregatedListVpnGatewaysRequest +from google.cloud.compute_v1.types.compute import AggregatedListVpnTunnelsRequest +from google.cloud.compute_v1.types.compute import AliasIpRange +from google.cloud.compute_v1.types.compute import AllocationResourceStatus +from google.cloud.compute_v1.types.compute import 
AllocationResourceStatusSpecificSKUAllocation +from google.cloud.compute_v1.types.compute import AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk +from google.cloud.compute_v1.types.compute import AllocationSpecificSKUAllocationReservedInstanceProperties +from google.cloud.compute_v1.types.compute import AllocationSpecificSKUReservation +from google.cloud.compute_v1.types.compute import Allowed +from google.cloud.compute_v1.types.compute import ApplyUpdatesToInstancesInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import AttachDiskInstanceRequest +from google.cloud.compute_v1.types.compute import AttachedDisk +from google.cloud.compute_v1.types.compute import AttachedDiskInitializeParams +from google.cloud.compute_v1.types.compute import AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import AttachNetworkEndpointsNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import AuditConfig +from google.cloud.compute_v1.types.compute import AuditLogConfig +from google.cloud.compute_v1.types.compute import AuthorizationLoggingOptions +from google.cloud.compute_v1.types.compute import Autoscaler +from google.cloud.compute_v1.types.compute import AutoscalerAggregatedList +from google.cloud.compute_v1.types.compute import AutoscalerList +from google.cloud.compute_v1.types.compute import AutoscalersScopedList +from google.cloud.compute_v1.types.compute import AutoscalerStatusDetails +from google.cloud.compute_v1.types.compute import AutoscalingPolicy +from google.cloud.compute_v1.types.compute import AutoscalingPolicyCpuUtilization +from google.cloud.compute_v1.types.compute import AutoscalingPolicyCustomMetricUtilization +from google.cloud.compute_v1.types.compute import AutoscalingPolicyLoadBalancingUtilization +from google.cloud.compute_v1.types.compute 
import AutoscalingPolicyScaleInControl +from google.cloud.compute_v1.types.compute import AutoscalingPolicyScalingSchedule +from google.cloud.compute_v1.types.compute import Backend +from google.cloud.compute_v1.types.compute import BackendBucket +from google.cloud.compute_v1.types.compute import BackendBucketCdnPolicy +from google.cloud.compute_v1.types.compute import BackendBucketCdnPolicyBypassCacheOnRequestHeader +from google.cloud.compute_v1.types.compute import BackendBucketCdnPolicyCacheKeyPolicy +from google.cloud.compute_v1.types.compute import BackendBucketCdnPolicyNegativeCachingPolicy +from google.cloud.compute_v1.types.compute import BackendBucketList +from google.cloud.compute_v1.types.compute import BackendService +from google.cloud.compute_v1.types.compute import BackendServiceAggregatedList +from google.cloud.compute_v1.types.compute import BackendServiceCdnPolicy +from google.cloud.compute_v1.types.compute import BackendServiceCdnPolicyBypassCacheOnRequestHeader +from google.cloud.compute_v1.types.compute import BackendServiceCdnPolicyNegativeCachingPolicy +from google.cloud.compute_v1.types.compute import BackendServiceConnectionTrackingPolicy +from google.cloud.compute_v1.types.compute import BackendServiceFailoverPolicy +from google.cloud.compute_v1.types.compute import BackendServiceGroupHealth +from google.cloud.compute_v1.types.compute import BackendServiceIAP +from google.cloud.compute_v1.types.compute import BackendServiceList +from google.cloud.compute_v1.types.compute import BackendServiceLocalityLoadBalancingPolicyConfig +from google.cloud.compute_v1.types.compute import BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy +from google.cloud.compute_v1.types.compute import BackendServiceLocalityLoadBalancingPolicyConfigPolicy +from google.cloud.compute_v1.types.compute import BackendServiceLogConfig +from google.cloud.compute_v1.types.compute import BackendServiceReference +from google.cloud.compute_v1.types.compute import 
BackendServicesScopedList +from google.cloud.compute_v1.types.compute import BfdPacket +from google.cloud.compute_v1.types.compute import BfdStatus +from google.cloud.compute_v1.types.compute import BfdStatusPacketCounts +from google.cloud.compute_v1.types.compute import Binding +from google.cloud.compute_v1.types.compute import BulkInsertDiskRequest +from google.cloud.compute_v1.types.compute import BulkInsertDiskResource +from google.cloud.compute_v1.types.compute import BulkInsertInstanceRequest +from google.cloud.compute_v1.types.compute import BulkInsertInstanceResource +from google.cloud.compute_v1.types.compute import BulkInsertInstanceResourcePerInstanceProperties +from google.cloud.compute_v1.types.compute import BulkInsertRegionDiskRequest +from google.cloud.compute_v1.types.compute import BulkInsertRegionInstanceRequest +from google.cloud.compute_v1.types.compute import CacheInvalidationRule +from google.cloud.compute_v1.types.compute import CacheKeyPolicy +from google.cloud.compute_v1.types.compute import CircuitBreakers +from google.cloud.compute_v1.types.compute import CloneRulesFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import CloneRulesNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import CloneRulesRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import Commitment +from google.cloud.compute_v1.types.compute import CommitmentAggregatedList +from google.cloud.compute_v1.types.compute import CommitmentList +from google.cloud.compute_v1.types.compute import CommitmentsScopedList +from google.cloud.compute_v1.types.compute import Condition +from google.cloud.compute_v1.types.compute import ConfidentialInstanceConfig +from google.cloud.compute_v1.types.compute import ConnectionDraining +from google.cloud.compute_v1.types.compute import ConsistentHashLoadBalancerSettings +from google.cloud.compute_v1.types.compute import ConsistentHashLoadBalancerSettingsHttpCookie +from 
google.cloud.compute_v1.types.compute import CorsPolicy +from google.cloud.compute_v1.types.compute import CreateInstancesInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import CreateInstancesRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import CreateSnapshotDiskRequest +from google.cloud.compute_v1.types.compute import CreateSnapshotRegionDiskRequest +from google.cloud.compute_v1.types.compute import CustomerEncryptionKey +from google.cloud.compute_v1.types.compute import CustomerEncryptionKeyProtectedDisk +from google.cloud.compute_v1.types.compute import Data +from google.cloud.compute_v1.types.compute import DeleteAccessConfigInstanceRequest +from google.cloud.compute_v1.types.compute import DeleteAddressRequest +from google.cloud.compute_v1.types.compute import DeleteAutoscalerRequest +from google.cloud.compute_v1.types.compute import DeleteBackendBucketRequest +from google.cloud.compute_v1.types.compute import DeleteBackendServiceRequest +from google.cloud.compute_v1.types.compute import DeleteDiskRequest +from google.cloud.compute_v1.types.compute import DeleteExternalVpnGatewayRequest +from google.cloud.compute_v1.types.compute import DeleteFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import DeleteFirewallRequest +from google.cloud.compute_v1.types.compute import DeleteForwardingRuleRequest +from google.cloud.compute_v1.types.compute import DeleteGlobalAddressRequest +from google.cloud.compute_v1.types.compute import DeleteGlobalForwardingRuleRequest +from google.cloud.compute_v1.types.compute import DeleteGlobalNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import DeleteGlobalOperationRequest +from google.cloud.compute_v1.types.compute import DeleteGlobalOperationResponse +from google.cloud.compute_v1.types.compute import DeleteGlobalOrganizationOperationRequest +from google.cloud.compute_v1.types.compute import DeleteGlobalOrganizationOperationResponse 
+from google.cloud.compute_v1.types.compute import DeleteGlobalPublicDelegatedPrefixeRequest +from google.cloud.compute_v1.types.compute import DeleteHealthCheckRequest +from google.cloud.compute_v1.types.compute import DeleteImageRequest +from google.cloud.compute_v1.types.compute import DeleteInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import DeleteInstanceGroupRequest +from google.cloud.compute_v1.types.compute import DeleteInstanceRequest +from google.cloud.compute_v1.types.compute import DeleteInstancesInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import DeleteInstancesRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import DeleteInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import DeleteInterconnectAttachmentRequest +from google.cloud.compute_v1.types.compute import DeleteInterconnectRequest +from google.cloud.compute_v1.types.compute import DeleteLicenseRequest +from google.cloud.compute_v1.types.compute import DeleteMachineImageRequest +from google.cloud.compute_v1.types.compute import DeleteNetworkAttachmentRequest +from google.cloud.compute_v1.types.compute import DeleteNetworkEdgeSecurityServiceRequest +from google.cloud.compute_v1.types.compute import DeleteNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import DeleteNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import DeleteNetworkRequest +from google.cloud.compute_v1.types.compute import DeleteNodeGroupRequest +from google.cloud.compute_v1.types.compute import DeleteNodesNodeGroupRequest +from google.cloud.compute_v1.types.compute import DeleteNodeTemplateRequest +from google.cloud.compute_v1.types.compute import DeletePacketMirroringRequest +from google.cloud.compute_v1.types.compute import DeletePerInstanceConfigsInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import 
DeletePerInstanceConfigsRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import DeletePublicAdvertisedPrefixeRequest +from google.cloud.compute_v1.types.compute import DeletePublicDelegatedPrefixeRequest +from google.cloud.compute_v1.types.compute import DeleteRegionAutoscalerRequest +from google.cloud.compute_v1.types.compute import DeleteRegionBackendServiceRequest +from google.cloud.compute_v1.types.compute import DeleteRegionDiskRequest +from google.cloud.compute_v1.types.compute import DeleteRegionHealthCheckRequest +from google.cloud.compute_v1.types.compute import DeleteRegionHealthCheckServiceRequest +from google.cloud.compute_v1.types.compute import DeleteRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import DeleteRegionInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import DeleteRegionNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import DeleteRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import DeleteRegionNotificationEndpointRequest +from google.cloud.compute_v1.types.compute import DeleteRegionOperationRequest +from google.cloud.compute_v1.types.compute import DeleteRegionOperationResponse +from google.cloud.compute_v1.types.compute import DeleteRegionSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import DeleteRegionSslCertificateRequest +from google.cloud.compute_v1.types.compute import DeleteRegionSslPolicyRequest +from google.cloud.compute_v1.types.compute import DeleteRegionTargetHttpProxyRequest +from google.cloud.compute_v1.types.compute import DeleteRegionTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import DeleteRegionTargetTcpProxyRequest +from google.cloud.compute_v1.types.compute import DeleteRegionUrlMapRequest +from google.cloud.compute_v1.types.compute import DeleteReservationRequest +from google.cloud.compute_v1.types.compute import DeleteResourcePolicyRequest 
+from google.cloud.compute_v1.types.compute import DeleteRouteRequest +from google.cloud.compute_v1.types.compute import DeleteRouterRequest +from google.cloud.compute_v1.types.compute import DeleteSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import DeleteServiceAttachmentRequest +from google.cloud.compute_v1.types.compute import DeleteSignedUrlKeyBackendBucketRequest +from google.cloud.compute_v1.types.compute import DeleteSignedUrlKeyBackendServiceRequest +from google.cloud.compute_v1.types.compute import DeleteSnapshotRequest +from google.cloud.compute_v1.types.compute import DeleteSslCertificateRequest +from google.cloud.compute_v1.types.compute import DeleteSslPolicyRequest +from google.cloud.compute_v1.types.compute import DeleteSubnetworkRequest +from google.cloud.compute_v1.types.compute import DeleteTargetGrpcProxyRequest +from google.cloud.compute_v1.types.compute import DeleteTargetHttpProxyRequest +from google.cloud.compute_v1.types.compute import DeleteTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import DeleteTargetInstanceRequest +from google.cloud.compute_v1.types.compute import DeleteTargetPoolRequest +from google.cloud.compute_v1.types.compute import DeleteTargetSslProxyRequest +from google.cloud.compute_v1.types.compute import DeleteTargetTcpProxyRequest +from google.cloud.compute_v1.types.compute import DeleteTargetVpnGatewayRequest +from google.cloud.compute_v1.types.compute import DeleteUrlMapRequest +from google.cloud.compute_v1.types.compute import DeleteVpnGatewayRequest +from google.cloud.compute_v1.types.compute import DeleteVpnTunnelRequest +from google.cloud.compute_v1.types.compute import DeleteZoneOperationRequest +from google.cloud.compute_v1.types.compute import DeleteZoneOperationResponse +from google.cloud.compute_v1.types.compute import Denied +from google.cloud.compute_v1.types.compute import DeprecateImageRequest +from google.cloud.compute_v1.types.compute import DeprecationStatus 
+from google.cloud.compute_v1.types.compute import DetachDiskInstanceRequest +from google.cloud.compute_v1.types.compute import DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import DetachNetworkEndpointsNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import DisableXpnHostProjectRequest +from google.cloud.compute_v1.types.compute import DisableXpnResourceProjectRequest +from google.cloud.compute_v1.types.compute import Disk +from google.cloud.compute_v1.types.compute import DiskAggregatedList +from google.cloud.compute_v1.types.compute import DiskAsyncReplication +from google.cloud.compute_v1.types.compute import DiskAsyncReplicationList +from google.cloud.compute_v1.types.compute import DiskInstantiationConfig +from google.cloud.compute_v1.types.compute import DiskList +from google.cloud.compute_v1.types.compute import DiskMoveRequest +from google.cloud.compute_v1.types.compute import DiskParams +from google.cloud.compute_v1.types.compute import DiskResourceStatus +from google.cloud.compute_v1.types.compute import DiskResourceStatusAsyncReplicationStatus +from google.cloud.compute_v1.types.compute import DisksAddResourcePoliciesRequest +from google.cloud.compute_v1.types.compute import DisksRemoveResourcePoliciesRequest +from google.cloud.compute_v1.types.compute import DisksResizeRequest +from google.cloud.compute_v1.types.compute import DisksScopedList +from google.cloud.compute_v1.types.compute import DisksStartAsyncReplicationRequest +from google.cloud.compute_v1.types.compute import DisksStopGroupAsyncReplicationResource +from google.cloud.compute_v1.types.compute import DiskType +from google.cloud.compute_v1.types.compute import DiskTypeAggregatedList +from google.cloud.compute_v1.types.compute import DiskTypeList +from google.cloud.compute_v1.types.compute import DiskTypesScopedList +from google.cloud.compute_v1.types.compute import DisplayDevice +from 
google.cloud.compute_v1.types.compute import DistributionPolicy +from google.cloud.compute_v1.types.compute import DistributionPolicyZoneConfiguration +from google.cloud.compute_v1.types.compute import Duration +from google.cloud.compute_v1.types.compute import EnableXpnHostProjectRequest +from google.cloud.compute_v1.types.compute import EnableXpnResourceProjectRequest +from google.cloud.compute_v1.types.compute import Error +from google.cloud.compute_v1.types.compute import ErrorDetails +from google.cloud.compute_v1.types.compute import ErrorInfo +from google.cloud.compute_v1.types.compute import Errors +from google.cloud.compute_v1.types.compute import ExchangedPeeringRoute +from google.cloud.compute_v1.types.compute import ExchangedPeeringRoutesList +from google.cloud.compute_v1.types.compute import ExpandIpCidrRangeSubnetworkRequest +from google.cloud.compute_v1.types.compute import Expr +from google.cloud.compute_v1.types.compute import ExternalVpnGateway +from google.cloud.compute_v1.types.compute import ExternalVpnGatewayInterface +from google.cloud.compute_v1.types.compute import ExternalVpnGatewayList +from google.cloud.compute_v1.types.compute import FileContentBuffer +from google.cloud.compute_v1.types.compute import Firewall +from google.cloud.compute_v1.types.compute import FirewallList +from google.cloud.compute_v1.types.compute import FirewallLogConfig +from google.cloud.compute_v1.types.compute import FirewallPoliciesListAssociationsResponse +from google.cloud.compute_v1.types.compute import FirewallPolicy +from google.cloud.compute_v1.types.compute import FirewallPolicyAssociation +from google.cloud.compute_v1.types.compute import FirewallPolicyList +from google.cloud.compute_v1.types.compute import FirewallPolicyRule +from google.cloud.compute_v1.types.compute import FirewallPolicyRuleMatcher +from google.cloud.compute_v1.types.compute import FirewallPolicyRuleMatcherLayer4Config +from google.cloud.compute_v1.types.compute import 
FirewallPolicyRuleSecureTag +from google.cloud.compute_v1.types.compute import FixedOrPercent +from google.cloud.compute_v1.types.compute import ForwardingRule +from google.cloud.compute_v1.types.compute import ForwardingRuleAggregatedList +from google.cloud.compute_v1.types.compute import ForwardingRuleList +from google.cloud.compute_v1.types.compute import ForwardingRuleReference +from google.cloud.compute_v1.types.compute import ForwardingRuleServiceDirectoryRegistration +from google.cloud.compute_v1.types.compute import ForwardingRulesScopedList +from google.cloud.compute_v1.types.compute import GetAcceleratorTypeRequest +from google.cloud.compute_v1.types.compute import GetAddressRequest +from google.cloud.compute_v1.types.compute import GetAssociationFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetAssociationNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetAssociationRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetAutoscalerRequest +from google.cloud.compute_v1.types.compute import GetBackendBucketRequest +from google.cloud.compute_v1.types.compute import GetBackendServiceRequest +from google.cloud.compute_v1.types.compute import GetDiagnosticsInterconnectRequest +from google.cloud.compute_v1.types.compute import GetDiskRequest +from google.cloud.compute_v1.types.compute import GetDiskTypeRequest +from google.cloud.compute_v1.types.compute import GetEffectiveFirewallsInstanceRequest +from google.cloud.compute_v1.types.compute import GetEffectiveFirewallsNetworkRequest +from google.cloud.compute_v1.types.compute import GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetExternalVpnGatewayRequest +from google.cloud.compute_v1.types.compute import GetFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetFirewallRequest +from google.cloud.compute_v1.types.compute import 
GetForwardingRuleRequest +from google.cloud.compute_v1.types.compute import GetFromFamilyImageRequest +from google.cloud.compute_v1.types.compute import GetGlobalAddressRequest +from google.cloud.compute_v1.types.compute import GetGlobalForwardingRuleRequest +from google.cloud.compute_v1.types.compute import GetGlobalNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import GetGlobalOperationRequest +from google.cloud.compute_v1.types.compute import GetGlobalOrganizationOperationRequest +from google.cloud.compute_v1.types.compute import GetGlobalPublicDelegatedPrefixeRequest +from google.cloud.compute_v1.types.compute import GetGuestAttributesInstanceRequest +from google.cloud.compute_v1.types.compute import GetHealthBackendServiceRequest +from google.cloud.compute_v1.types.compute import GetHealthCheckRequest +from google.cloud.compute_v1.types.compute import GetHealthRegionBackendServiceRequest +from google.cloud.compute_v1.types.compute import GetHealthTargetPoolRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyBackendServiceRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyDiskRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyImageRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyInstanceRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyLicenseRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyMachineImageRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyNetworkAttachmentRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyNodeGroupRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyNodeTemplateRequest 
+from google.cloud.compute_v1.types.compute import GetIamPolicyRegionBackendServiceRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyRegionDiskRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyReservationRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyResourcePolicyRequest +from google.cloud.compute_v1.types.compute import GetIamPolicyServiceAttachmentRequest +from google.cloud.compute_v1.types.compute import GetIamPolicySnapshotRequest +from google.cloud.compute_v1.types.compute import GetIamPolicySubnetworkRequest +from google.cloud.compute_v1.types.compute import GetImageFamilyViewRequest +from google.cloud.compute_v1.types.compute import GetImageRequest +from google.cloud.compute_v1.types.compute import GetInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import GetInstanceGroupRequest +from google.cloud.compute_v1.types.compute import GetInstanceRequest +from google.cloud.compute_v1.types.compute import GetInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import GetInterconnectAttachmentRequest +from google.cloud.compute_v1.types.compute import GetInterconnectLocationRequest +from google.cloud.compute_v1.types.compute import GetInterconnectRemoteLocationRequest +from google.cloud.compute_v1.types.compute import GetInterconnectRequest +from google.cloud.compute_v1.types.compute import GetLicenseCodeRequest +from google.cloud.compute_v1.types.compute import GetLicenseRequest +from google.cloud.compute_v1.types.compute import GetMachineImageRequest +from google.cloud.compute_v1.types.compute import GetMachineTypeRequest +from google.cloud.compute_v1.types.compute import GetNatMappingInfoRoutersRequest +from google.cloud.compute_v1.types.compute import GetNetworkAttachmentRequest +from google.cloud.compute_v1.types.compute import 
GetNetworkEdgeSecurityServiceRequest +from google.cloud.compute_v1.types.compute import GetNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import GetNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetNetworkRequest +from google.cloud.compute_v1.types.compute import GetNodeGroupRequest +from google.cloud.compute_v1.types.compute import GetNodeTemplateRequest +from google.cloud.compute_v1.types.compute import GetNodeTypeRequest +from google.cloud.compute_v1.types.compute import GetPacketMirroringRequest +from google.cloud.compute_v1.types.compute import GetProjectRequest +from google.cloud.compute_v1.types.compute import GetPublicAdvertisedPrefixeRequest +from google.cloud.compute_v1.types.compute import GetPublicDelegatedPrefixeRequest +from google.cloud.compute_v1.types.compute import GetRegionAutoscalerRequest +from google.cloud.compute_v1.types.compute import GetRegionBackendServiceRequest +from google.cloud.compute_v1.types.compute import GetRegionCommitmentRequest +from google.cloud.compute_v1.types.compute import GetRegionDiskRequest +from google.cloud.compute_v1.types.compute import GetRegionDiskTypeRequest +from google.cloud.compute_v1.types.compute import GetRegionHealthCheckRequest +from google.cloud.compute_v1.types.compute import GetRegionHealthCheckServiceRequest +from google.cloud.compute_v1.types.compute import GetRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import GetRegionInstanceGroupRequest +from google.cloud.compute_v1.types.compute import GetRegionInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import GetRegionNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import GetRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetRegionNotificationEndpointRequest +from google.cloud.compute_v1.types.compute import GetRegionOperationRequest +from google.cloud.compute_v1.types.compute import 
GetRegionRequest +from google.cloud.compute_v1.types.compute import GetRegionSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import GetRegionSslCertificateRequest +from google.cloud.compute_v1.types.compute import GetRegionSslPolicyRequest +from google.cloud.compute_v1.types.compute import GetRegionTargetHttpProxyRequest +from google.cloud.compute_v1.types.compute import GetRegionTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import GetRegionTargetTcpProxyRequest +from google.cloud.compute_v1.types.compute import GetRegionUrlMapRequest +from google.cloud.compute_v1.types.compute import GetReservationRequest +from google.cloud.compute_v1.types.compute import GetResourcePolicyRequest +from google.cloud.compute_v1.types.compute import GetRouteRequest +from google.cloud.compute_v1.types.compute import GetRouterRequest +from google.cloud.compute_v1.types.compute import GetRouterStatusRouterRequest +from google.cloud.compute_v1.types.compute import GetRuleFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetRuleNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetRuleRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import GetRuleSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import GetScreenshotInstanceRequest +from google.cloud.compute_v1.types.compute import GetSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import GetSerialPortOutputInstanceRequest +from google.cloud.compute_v1.types.compute import GetServiceAttachmentRequest +from google.cloud.compute_v1.types.compute import GetShieldedInstanceIdentityInstanceRequest +from google.cloud.compute_v1.types.compute import GetSnapshotRequest +from google.cloud.compute_v1.types.compute import GetSslCertificateRequest +from google.cloud.compute_v1.types.compute import GetSslPolicyRequest +from google.cloud.compute_v1.types.compute import GetStatusVpnGatewayRequest 
+from google.cloud.compute_v1.types.compute import GetSubnetworkRequest +from google.cloud.compute_v1.types.compute import GetTargetGrpcProxyRequest +from google.cloud.compute_v1.types.compute import GetTargetHttpProxyRequest +from google.cloud.compute_v1.types.compute import GetTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import GetTargetInstanceRequest +from google.cloud.compute_v1.types.compute import GetTargetPoolRequest +from google.cloud.compute_v1.types.compute import GetTargetSslProxyRequest +from google.cloud.compute_v1.types.compute import GetTargetTcpProxyRequest +from google.cloud.compute_v1.types.compute import GetTargetVpnGatewayRequest +from google.cloud.compute_v1.types.compute import GetUrlMapRequest +from google.cloud.compute_v1.types.compute import GetVpnGatewayRequest +from google.cloud.compute_v1.types.compute import GetVpnTunnelRequest +from google.cloud.compute_v1.types.compute import GetXpnHostProjectRequest +from google.cloud.compute_v1.types.compute import GetXpnResourcesProjectsRequest +from google.cloud.compute_v1.types.compute import GetZoneOperationRequest +from google.cloud.compute_v1.types.compute import GetZoneRequest +from google.cloud.compute_v1.types.compute import GlobalAddressesMoveRequest +from google.cloud.compute_v1.types.compute import GlobalNetworkEndpointGroupsAttachEndpointsRequest +from google.cloud.compute_v1.types.compute import GlobalNetworkEndpointGroupsDetachEndpointsRequest +from google.cloud.compute_v1.types.compute import GlobalOrganizationSetPolicyRequest +from google.cloud.compute_v1.types.compute import GlobalSetLabelsRequest +from google.cloud.compute_v1.types.compute import GlobalSetPolicyRequest +from google.cloud.compute_v1.types.compute import GRPCHealthCheck +from google.cloud.compute_v1.types.compute import GuestAttributes +from google.cloud.compute_v1.types.compute import GuestAttributesEntry +from google.cloud.compute_v1.types.compute import GuestAttributesValue +from 
google.cloud.compute_v1.types.compute import GuestOsFeature +from google.cloud.compute_v1.types.compute import HealthCheck +from google.cloud.compute_v1.types.compute import HealthCheckList +from google.cloud.compute_v1.types.compute import HealthCheckLogConfig +from google.cloud.compute_v1.types.compute import HealthCheckReference +from google.cloud.compute_v1.types.compute import HealthChecksAggregatedList +from google.cloud.compute_v1.types.compute import HealthCheckService +from google.cloud.compute_v1.types.compute import HealthCheckServiceReference +from google.cloud.compute_v1.types.compute import HealthCheckServicesList +from google.cloud.compute_v1.types.compute import HealthChecksScopedList +from google.cloud.compute_v1.types.compute import HealthStatus +from google.cloud.compute_v1.types.compute import HealthStatusForNetworkEndpoint +from google.cloud.compute_v1.types.compute import Help +from google.cloud.compute_v1.types.compute import HelpLink +from google.cloud.compute_v1.types.compute import HostRule +from google.cloud.compute_v1.types.compute import HTTP2HealthCheck +from google.cloud.compute_v1.types.compute import HttpFaultAbort +from google.cloud.compute_v1.types.compute import HttpFaultDelay +from google.cloud.compute_v1.types.compute import HttpFaultInjection +from google.cloud.compute_v1.types.compute import HttpHeaderAction +from google.cloud.compute_v1.types.compute import HttpHeaderMatch +from google.cloud.compute_v1.types.compute import HttpHeaderOption +from google.cloud.compute_v1.types.compute import HTTPHealthCheck +from google.cloud.compute_v1.types.compute import HttpQueryParameterMatch +from google.cloud.compute_v1.types.compute import HttpRedirectAction +from google.cloud.compute_v1.types.compute import HttpRetryPolicy +from google.cloud.compute_v1.types.compute import HttpRouteAction +from google.cloud.compute_v1.types.compute import HttpRouteRule +from google.cloud.compute_v1.types.compute import HttpRouteRuleMatch +from 
google.cloud.compute_v1.types.compute import HTTPSHealthCheck +from google.cloud.compute_v1.types.compute import Image +from google.cloud.compute_v1.types.compute import ImageFamilyView +from google.cloud.compute_v1.types.compute import ImageList +from google.cloud.compute_v1.types.compute import InitialStateConfig +from google.cloud.compute_v1.types.compute import InsertAddressRequest +from google.cloud.compute_v1.types.compute import InsertAutoscalerRequest +from google.cloud.compute_v1.types.compute import InsertBackendBucketRequest +from google.cloud.compute_v1.types.compute import InsertBackendServiceRequest +from google.cloud.compute_v1.types.compute import InsertDiskRequest +from google.cloud.compute_v1.types.compute import InsertExternalVpnGatewayRequest +from google.cloud.compute_v1.types.compute import InsertFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import InsertFirewallRequest +from google.cloud.compute_v1.types.compute import InsertForwardingRuleRequest +from google.cloud.compute_v1.types.compute import InsertGlobalAddressRequest +from google.cloud.compute_v1.types.compute import InsertGlobalForwardingRuleRequest +from google.cloud.compute_v1.types.compute import InsertGlobalNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import InsertGlobalPublicDelegatedPrefixeRequest +from google.cloud.compute_v1.types.compute import InsertHealthCheckRequest +from google.cloud.compute_v1.types.compute import InsertImageRequest +from google.cloud.compute_v1.types.compute import InsertInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import InsertInstanceGroupRequest +from google.cloud.compute_v1.types.compute import InsertInstanceRequest +from google.cloud.compute_v1.types.compute import InsertInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import InsertInterconnectAttachmentRequest +from google.cloud.compute_v1.types.compute import InsertInterconnectRequest +from 
google.cloud.compute_v1.types.compute import InsertLicenseRequest +from google.cloud.compute_v1.types.compute import InsertMachineImageRequest +from google.cloud.compute_v1.types.compute import InsertNetworkAttachmentRequest +from google.cloud.compute_v1.types.compute import InsertNetworkEdgeSecurityServiceRequest +from google.cloud.compute_v1.types.compute import InsertNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import InsertNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import InsertNetworkRequest +from google.cloud.compute_v1.types.compute import InsertNodeGroupRequest +from google.cloud.compute_v1.types.compute import InsertNodeTemplateRequest +from google.cloud.compute_v1.types.compute import InsertPacketMirroringRequest +from google.cloud.compute_v1.types.compute import InsertPublicAdvertisedPrefixeRequest +from google.cloud.compute_v1.types.compute import InsertPublicDelegatedPrefixeRequest +from google.cloud.compute_v1.types.compute import InsertRegionAutoscalerRequest +from google.cloud.compute_v1.types.compute import InsertRegionBackendServiceRequest +from google.cloud.compute_v1.types.compute import InsertRegionCommitmentRequest +from google.cloud.compute_v1.types.compute import InsertRegionDiskRequest +from google.cloud.compute_v1.types.compute import InsertRegionHealthCheckRequest +from google.cloud.compute_v1.types.compute import InsertRegionHealthCheckServiceRequest +from google.cloud.compute_v1.types.compute import InsertRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import InsertRegionInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import InsertRegionNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import InsertRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import InsertRegionNotificationEndpointRequest +from google.cloud.compute_v1.types.compute import InsertRegionSecurityPolicyRequest 
+from google.cloud.compute_v1.types.compute import InsertRegionSslCertificateRequest +from google.cloud.compute_v1.types.compute import InsertRegionSslPolicyRequest +from google.cloud.compute_v1.types.compute import InsertRegionTargetHttpProxyRequest +from google.cloud.compute_v1.types.compute import InsertRegionTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import InsertRegionTargetTcpProxyRequest +from google.cloud.compute_v1.types.compute import InsertRegionUrlMapRequest +from google.cloud.compute_v1.types.compute import InsertReservationRequest +from google.cloud.compute_v1.types.compute import InsertResourcePolicyRequest +from google.cloud.compute_v1.types.compute import InsertRouteRequest +from google.cloud.compute_v1.types.compute import InsertRouterRequest +from google.cloud.compute_v1.types.compute import InsertSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import InsertServiceAttachmentRequest +from google.cloud.compute_v1.types.compute import InsertSnapshotRequest +from google.cloud.compute_v1.types.compute import InsertSslCertificateRequest +from google.cloud.compute_v1.types.compute import InsertSslPolicyRequest +from google.cloud.compute_v1.types.compute import InsertSubnetworkRequest +from google.cloud.compute_v1.types.compute import InsertTargetGrpcProxyRequest +from google.cloud.compute_v1.types.compute import InsertTargetHttpProxyRequest +from google.cloud.compute_v1.types.compute import InsertTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import InsertTargetInstanceRequest +from google.cloud.compute_v1.types.compute import InsertTargetPoolRequest +from google.cloud.compute_v1.types.compute import InsertTargetSslProxyRequest +from google.cloud.compute_v1.types.compute import InsertTargetTcpProxyRequest +from google.cloud.compute_v1.types.compute import InsertTargetVpnGatewayRequest +from google.cloud.compute_v1.types.compute import InsertUrlMapRequest +from 
google.cloud.compute_v1.types.compute import InsertVpnGatewayRequest +from google.cloud.compute_v1.types.compute import InsertVpnTunnelRequest +from google.cloud.compute_v1.types.compute import Instance +from google.cloud.compute_v1.types.compute import InstanceAggregatedList +from google.cloud.compute_v1.types.compute import InstanceConsumptionData +from google.cloud.compute_v1.types.compute import InstanceConsumptionInfo +from google.cloud.compute_v1.types.compute import InstanceGroup +from google.cloud.compute_v1.types.compute import InstanceGroupAggregatedList +from google.cloud.compute_v1.types.compute import InstanceGroupList +from google.cloud.compute_v1.types.compute import InstanceGroupManager +from google.cloud.compute_v1.types.compute import InstanceGroupManagerActionsSummary +from google.cloud.compute_v1.types.compute import InstanceGroupManagerAggregatedList +from google.cloud.compute_v1.types.compute import InstanceGroupManagerAutoHealingPolicy +from google.cloud.compute_v1.types.compute import InstanceGroupManagerInstanceLifecyclePolicy +from google.cloud.compute_v1.types.compute import InstanceGroupManagerList +from google.cloud.compute_v1.types.compute import InstanceGroupManagersAbandonInstancesRequest +from google.cloud.compute_v1.types.compute import InstanceGroupManagersApplyUpdatesRequest +from google.cloud.compute_v1.types.compute import InstanceGroupManagersCreateInstancesRequest +from google.cloud.compute_v1.types.compute import InstanceGroupManagersDeleteInstancesRequest +from google.cloud.compute_v1.types.compute import InstanceGroupManagersDeletePerInstanceConfigsReq +from google.cloud.compute_v1.types.compute import InstanceGroupManagersListErrorsResponse +from google.cloud.compute_v1.types.compute import InstanceGroupManagersListManagedInstancesResponse +from google.cloud.compute_v1.types.compute import InstanceGroupManagersListPerInstanceConfigsResp +from google.cloud.compute_v1.types.compute import 
InstanceGroupManagersPatchPerInstanceConfigsReq +from google.cloud.compute_v1.types.compute import InstanceGroupManagersRecreateInstancesRequest +from google.cloud.compute_v1.types.compute import InstanceGroupManagersScopedList +from google.cloud.compute_v1.types.compute import InstanceGroupManagersSetInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import InstanceGroupManagersSetTargetPoolsRequest +from google.cloud.compute_v1.types.compute import InstanceGroupManagerStatus +from google.cloud.compute_v1.types.compute import InstanceGroupManagerStatusStateful +from google.cloud.compute_v1.types.compute import InstanceGroupManagerStatusStatefulPerInstanceConfigs +from google.cloud.compute_v1.types.compute import InstanceGroupManagerStatusVersionTarget +from google.cloud.compute_v1.types.compute import InstanceGroupManagersUpdatePerInstanceConfigsReq +from google.cloud.compute_v1.types.compute import InstanceGroupManagerUpdatePolicy +from google.cloud.compute_v1.types.compute import InstanceGroupManagerVersion +from google.cloud.compute_v1.types.compute import InstanceGroupsAddInstancesRequest +from google.cloud.compute_v1.types.compute import InstanceGroupsListInstances +from google.cloud.compute_v1.types.compute import InstanceGroupsListInstancesRequest +from google.cloud.compute_v1.types.compute import InstanceGroupsRemoveInstancesRequest +from google.cloud.compute_v1.types.compute import InstanceGroupsScopedList +from google.cloud.compute_v1.types.compute import InstanceGroupsSetNamedPortsRequest +from google.cloud.compute_v1.types.compute import InstanceList +from google.cloud.compute_v1.types.compute import InstanceListReferrers +from google.cloud.compute_v1.types.compute import InstanceManagedByIgmError +from google.cloud.compute_v1.types.compute import InstanceManagedByIgmErrorInstanceActionDetails +from google.cloud.compute_v1.types.compute import InstanceManagedByIgmErrorManagedInstanceError +from google.cloud.compute_v1.types.compute 
import InstanceMoveRequest +from google.cloud.compute_v1.types.compute import InstanceParams +from google.cloud.compute_v1.types.compute import InstanceProperties +from google.cloud.compute_v1.types.compute import InstanceReference +from google.cloud.compute_v1.types.compute import InstancesAddResourcePoliciesRequest +from google.cloud.compute_v1.types.compute import InstancesGetEffectiveFirewallsResponse +from google.cloud.compute_v1.types.compute import InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy +from google.cloud.compute_v1.types.compute import InstancesRemoveResourcePoliciesRequest +from google.cloud.compute_v1.types.compute import InstancesScopedList +from google.cloud.compute_v1.types.compute import InstancesSetLabelsRequest +from google.cloud.compute_v1.types.compute import InstancesSetMachineResourcesRequest +from google.cloud.compute_v1.types.compute import InstancesSetMachineTypeRequest +from google.cloud.compute_v1.types.compute import InstancesSetMinCpuPlatformRequest +from google.cloud.compute_v1.types.compute import InstancesSetNameRequest +from google.cloud.compute_v1.types.compute import InstancesSetServiceAccountRequest +from google.cloud.compute_v1.types.compute import InstancesStartWithEncryptionKeyRequest +from google.cloud.compute_v1.types.compute import InstanceTemplate +from google.cloud.compute_v1.types.compute import InstanceTemplateAggregatedList +from google.cloud.compute_v1.types.compute import InstanceTemplateList +from google.cloud.compute_v1.types.compute import InstanceTemplatesScopedList +from google.cloud.compute_v1.types.compute import InstanceWithNamedPorts +from google.cloud.compute_v1.types.compute import Int64RangeMatch +from google.cloud.compute_v1.types.compute import Interconnect +from google.cloud.compute_v1.types.compute import InterconnectAttachment +from google.cloud.compute_v1.types.compute import InterconnectAttachmentAggregatedList +from google.cloud.compute_v1.types.compute import 
InterconnectAttachmentConfigurationConstraints +from google.cloud.compute_v1.types.compute import InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange +from google.cloud.compute_v1.types.compute import InterconnectAttachmentList +from google.cloud.compute_v1.types.compute import InterconnectAttachmentPartnerMetadata +from google.cloud.compute_v1.types.compute import InterconnectAttachmentPrivateInfo +from google.cloud.compute_v1.types.compute import InterconnectAttachmentsScopedList +from google.cloud.compute_v1.types.compute import InterconnectCircuitInfo +from google.cloud.compute_v1.types.compute import InterconnectDiagnostics +from google.cloud.compute_v1.types.compute import InterconnectDiagnosticsARPEntry +from google.cloud.compute_v1.types.compute import InterconnectDiagnosticsLinkLACPStatus +from google.cloud.compute_v1.types.compute import InterconnectDiagnosticsLinkOpticalPower +from google.cloud.compute_v1.types.compute import InterconnectDiagnosticsLinkStatus +from google.cloud.compute_v1.types.compute import InterconnectList +from google.cloud.compute_v1.types.compute import InterconnectLocation +from google.cloud.compute_v1.types.compute import InterconnectLocationList +from google.cloud.compute_v1.types.compute import InterconnectLocationRegionInfo +from google.cloud.compute_v1.types.compute import InterconnectOutageNotification +from google.cloud.compute_v1.types.compute import InterconnectRemoteLocation +from google.cloud.compute_v1.types.compute import InterconnectRemoteLocationConstraints +from google.cloud.compute_v1.types.compute import InterconnectRemoteLocationConstraintsSubnetLengthRange +from google.cloud.compute_v1.types.compute import InterconnectRemoteLocationList +from google.cloud.compute_v1.types.compute import InterconnectRemoteLocationPermittedConnections +from google.cloud.compute_v1.types.compute import InterconnectsGetDiagnosticsResponse +from google.cloud.compute_v1.types.compute import InvalidateCacheUrlMapRequest 
+from google.cloud.compute_v1.types.compute import Items +from google.cloud.compute_v1.types.compute import License +from google.cloud.compute_v1.types.compute import LicenseCode +from google.cloud.compute_v1.types.compute import LicenseCodeLicenseAlias +from google.cloud.compute_v1.types.compute import LicenseResourceCommitment +from google.cloud.compute_v1.types.compute import LicenseResourceRequirements +from google.cloud.compute_v1.types.compute import LicensesListResponse +from google.cloud.compute_v1.types.compute import ListAcceleratorTypesRequest +from google.cloud.compute_v1.types.compute import ListAddressesRequest +from google.cloud.compute_v1.types.compute import ListAssociationsFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import ListAutoscalersRequest +from google.cloud.compute_v1.types.compute import ListAvailableFeaturesRegionSslPoliciesRequest +from google.cloud.compute_v1.types.compute import ListAvailableFeaturesSslPoliciesRequest +from google.cloud.compute_v1.types.compute import ListBackendBucketsRequest +from google.cloud.compute_v1.types.compute import ListBackendServicesRequest +from google.cloud.compute_v1.types.compute import ListDisksRequest +from google.cloud.compute_v1.types.compute import ListDiskTypesRequest +from google.cloud.compute_v1.types.compute import ListErrorsInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import ListErrorsRegionInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import ListExternalVpnGatewaysRequest +from google.cloud.compute_v1.types.compute import ListFirewallPoliciesRequest +from google.cloud.compute_v1.types.compute import ListFirewallsRequest +from google.cloud.compute_v1.types.compute import ListForwardingRulesRequest +from google.cloud.compute_v1.types.compute import ListGlobalAddressesRequest +from google.cloud.compute_v1.types.compute import ListGlobalForwardingRulesRequest +from google.cloud.compute_v1.types.compute import 
ListGlobalNetworkEndpointGroupsRequest +from google.cloud.compute_v1.types.compute import ListGlobalOperationsRequest +from google.cloud.compute_v1.types.compute import ListGlobalOrganizationOperationsRequest +from google.cloud.compute_v1.types.compute import ListGlobalPublicDelegatedPrefixesRequest +from google.cloud.compute_v1.types.compute import ListHealthChecksRequest +from google.cloud.compute_v1.types.compute import ListImagesRequest +from google.cloud.compute_v1.types.compute import ListInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import ListInstanceGroupsRequest +from google.cloud.compute_v1.types.compute import ListInstancesInstanceGroupsRequest +from google.cloud.compute_v1.types.compute import ListInstancesRegionInstanceGroupsRequest +from google.cloud.compute_v1.types.compute import ListInstancesRequest +from google.cloud.compute_v1.types.compute import ListInstanceTemplatesRequest +from google.cloud.compute_v1.types.compute import ListInterconnectAttachmentsRequest +from google.cloud.compute_v1.types.compute import ListInterconnectLocationsRequest +from google.cloud.compute_v1.types.compute import ListInterconnectRemoteLocationsRequest +from google.cloud.compute_v1.types.compute import ListInterconnectsRequest +from google.cloud.compute_v1.types.compute import ListLicensesRequest +from google.cloud.compute_v1.types.compute import ListMachineImagesRequest +from google.cloud.compute_v1.types.compute import ListMachineTypesRequest +from google.cloud.compute_v1.types.compute import ListManagedInstancesInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import ListManagedInstancesRegionInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import ListNetworkAttachmentsRequest +from google.cloud.compute_v1.types.compute import ListNetworkEndpointGroupsRequest +from google.cloud.compute_v1.types.compute import ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest +from 
google.cloud.compute_v1.types.compute import ListNetworkEndpointsNetworkEndpointGroupsRequest +from google.cloud.compute_v1.types.compute import ListNetworkFirewallPoliciesRequest +from google.cloud.compute_v1.types.compute import ListNetworksRequest +from google.cloud.compute_v1.types.compute import ListNodeGroupsRequest +from google.cloud.compute_v1.types.compute import ListNodesNodeGroupsRequest +from google.cloud.compute_v1.types.compute import ListNodeTemplatesRequest +from google.cloud.compute_v1.types.compute import ListNodeTypesRequest +from google.cloud.compute_v1.types.compute import ListPacketMirroringsRequest +from google.cloud.compute_v1.types.compute import ListPeeringRoutesNetworksRequest +from google.cloud.compute_v1.types.compute import ListPerInstanceConfigsInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import ListPerInstanceConfigsRegionInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import ListPreconfiguredExpressionSetsSecurityPoliciesRequest +from google.cloud.compute_v1.types.compute import ListPublicAdvertisedPrefixesRequest +from google.cloud.compute_v1.types.compute import ListPublicDelegatedPrefixesRequest +from google.cloud.compute_v1.types.compute import ListReferrersInstancesRequest +from google.cloud.compute_v1.types.compute import ListRegionAutoscalersRequest +from google.cloud.compute_v1.types.compute import ListRegionBackendServicesRequest +from google.cloud.compute_v1.types.compute import ListRegionCommitmentsRequest +from google.cloud.compute_v1.types.compute import ListRegionDisksRequest +from google.cloud.compute_v1.types.compute import ListRegionDiskTypesRequest +from google.cloud.compute_v1.types.compute import ListRegionHealthCheckServicesRequest +from google.cloud.compute_v1.types.compute import ListRegionHealthChecksRequest +from google.cloud.compute_v1.types.compute import ListRegionInstanceGroupManagersRequest +from google.cloud.compute_v1.types.compute import 
ListRegionInstanceGroupsRequest +from google.cloud.compute_v1.types.compute import ListRegionInstanceTemplatesRequest +from google.cloud.compute_v1.types.compute import ListRegionNetworkEndpointGroupsRequest +from google.cloud.compute_v1.types.compute import ListRegionNetworkFirewallPoliciesRequest +from google.cloud.compute_v1.types.compute import ListRegionNotificationEndpointsRequest +from google.cloud.compute_v1.types.compute import ListRegionOperationsRequest +from google.cloud.compute_v1.types.compute import ListRegionSecurityPoliciesRequest +from google.cloud.compute_v1.types.compute import ListRegionsRequest +from google.cloud.compute_v1.types.compute import ListRegionSslCertificatesRequest +from google.cloud.compute_v1.types.compute import ListRegionSslPoliciesRequest +from google.cloud.compute_v1.types.compute import ListRegionTargetHttpProxiesRequest +from google.cloud.compute_v1.types.compute import ListRegionTargetHttpsProxiesRequest +from google.cloud.compute_v1.types.compute import ListRegionTargetTcpProxiesRequest +from google.cloud.compute_v1.types.compute import ListRegionUrlMapsRequest +from google.cloud.compute_v1.types.compute import ListReservationsRequest +from google.cloud.compute_v1.types.compute import ListResourcePoliciesRequest +from google.cloud.compute_v1.types.compute import ListRoutersRequest +from google.cloud.compute_v1.types.compute import ListRoutesRequest +from google.cloud.compute_v1.types.compute import ListSecurityPoliciesRequest +from google.cloud.compute_v1.types.compute import ListServiceAttachmentsRequest +from google.cloud.compute_v1.types.compute import ListSnapshotsRequest +from google.cloud.compute_v1.types.compute import ListSslCertificatesRequest +from google.cloud.compute_v1.types.compute import ListSslPoliciesRequest +from google.cloud.compute_v1.types.compute import ListSubnetworksRequest +from google.cloud.compute_v1.types.compute import ListTargetGrpcProxiesRequest +from google.cloud.compute_v1.types.compute 
import ListTargetHttpProxiesRequest +from google.cloud.compute_v1.types.compute import ListTargetHttpsProxiesRequest +from google.cloud.compute_v1.types.compute import ListTargetInstancesRequest +from google.cloud.compute_v1.types.compute import ListTargetPoolsRequest +from google.cloud.compute_v1.types.compute import ListTargetSslProxiesRequest +from google.cloud.compute_v1.types.compute import ListTargetTcpProxiesRequest +from google.cloud.compute_v1.types.compute import ListTargetVpnGatewaysRequest +from google.cloud.compute_v1.types.compute import ListUrlMapsRequest +from google.cloud.compute_v1.types.compute import ListUsableSubnetworksRequest +from google.cloud.compute_v1.types.compute import ListVpnGatewaysRequest +from google.cloud.compute_v1.types.compute import ListVpnTunnelsRequest +from google.cloud.compute_v1.types.compute import ListXpnHostsProjectsRequest +from google.cloud.compute_v1.types.compute import ListZoneOperationsRequest +from google.cloud.compute_v1.types.compute import ListZonesRequest +from google.cloud.compute_v1.types.compute import LocalDisk +from google.cloud.compute_v1.types.compute import LocalizedMessage +from google.cloud.compute_v1.types.compute import LocationPolicy +from google.cloud.compute_v1.types.compute import LocationPolicyLocation +from google.cloud.compute_v1.types.compute import LocationPolicyLocationConstraints +from google.cloud.compute_v1.types.compute import LogConfig +from google.cloud.compute_v1.types.compute import LogConfigCloudAuditOptions +from google.cloud.compute_v1.types.compute import LogConfigCounterOptions +from google.cloud.compute_v1.types.compute import LogConfigCounterOptionsCustomField +from google.cloud.compute_v1.types.compute import LogConfigDataAccessOptions +from google.cloud.compute_v1.types.compute import MachineImage +from google.cloud.compute_v1.types.compute import MachineImageList +from google.cloud.compute_v1.types.compute import MachineType +from google.cloud.compute_v1.types.compute 
import MachineTypeAggregatedList +from google.cloud.compute_v1.types.compute import MachineTypeList +from google.cloud.compute_v1.types.compute import MachineTypesScopedList +from google.cloud.compute_v1.types.compute import ManagedInstance +from google.cloud.compute_v1.types.compute import ManagedInstanceInstanceHealth +from google.cloud.compute_v1.types.compute import ManagedInstanceLastAttempt +from google.cloud.compute_v1.types.compute import ManagedInstanceVersion +from google.cloud.compute_v1.types.compute import Metadata +from google.cloud.compute_v1.types.compute import MetadataFilter +from google.cloud.compute_v1.types.compute import MetadataFilterLabelMatch +from google.cloud.compute_v1.types.compute import MoveAddressRequest +from google.cloud.compute_v1.types.compute import MoveDiskProjectRequest +from google.cloud.compute_v1.types.compute import MoveFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import MoveGlobalAddressRequest +from google.cloud.compute_v1.types.compute import MoveInstanceProjectRequest +from google.cloud.compute_v1.types.compute import NamedPort +from google.cloud.compute_v1.types.compute import Network +from google.cloud.compute_v1.types.compute import NetworkAttachment +from google.cloud.compute_v1.types.compute import NetworkAttachmentAggregatedList +from google.cloud.compute_v1.types.compute import NetworkAttachmentConnectedEndpoint +from google.cloud.compute_v1.types.compute import NetworkAttachmentList +from google.cloud.compute_v1.types.compute import NetworkAttachmentsScopedList +from google.cloud.compute_v1.types.compute import NetworkEdgeSecurityService +from google.cloud.compute_v1.types.compute import NetworkEdgeSecurityServiceAggregatedList +from google.cloud.compute_v1.types.compute import NetworkEdgeSecurityServicesScopedList +from google.cloud.compute_v1.types.compute import NetworkEndpoint +from google.cloud.compute_v1.types.compute import NetworkEndpointGroup +from 
google.cloud.compute_v1.types.compute import NetworkEndpointGroupAggregatedList +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupAppEngine +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupCloudFunction +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupCloudRun +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupList +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupPscData +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupsAttachEndpointsRequest +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupsDetachEndpointsRequest +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupsListEndpointsRequest +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupsListNetworkEndpoints +from google.cloud.compute_v1.types.compute import NetworkEndpointGroupsScopedList +from google.cloud.compute_v1.types.compute import NetworkEndpointWithHealthStatus +from google.cloud.compute_v1.types.compute import NetworkInterface +from google.cloud.compute_v1.types.compute import NetworkList +from google.cloud.compute_v1.types.compute import NetworkPeering +from google.cloud.compute_v1.types.compute import NetworkPerformanceConfig +from google.cloud.compute_v1.types.compute import NetworkRoutingConfig +from google.cloud.compute_v1.types.compute import NetworksAddPeeringRequest +from google.cloud.compute_v1.types.compute import NetworksGetEffectiveFirewallsResponse +from google.cloud.compute_v1.types.compute import NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy +from google.cloud.compute_v1.types.compute import NetworksRemovePeeringRequest +from google.cloud.compute_v1.types.compute import NetworksUpdatePeeringRequest +from google.cloud.compute_v1.types.compute import NodeGroup +from google.cloud.compute_v1.types.compute import NodeGroupAggregatedList +from google.cloud.compute_v1.types.compute import 
NodeGroupAutoscalingPolicy +from google.cloud.compute_v1.types.compute import NodeGroupList +from google.cloud.compute_v1.types.compute import NodeGroupMaintenanceWindow +from google.cloud.compute_v1.types.compute import NodeGroupNode +from google.cloud.compute_v1.types.compute import NodeGroupsAddNodesRequest +from google.cloud.compute_v1.types.compute import NodeGroupsDeleteNodesRequest +from google.cloud.compute_v1.types.compute import NodeGroupsListNodes +from google.cloud.compute_v1.types.compute import NodeGroupsScopedList +from google.cloud.compute_v1.types.compute import NodeGroupsSetNodeTemplateRequest +from google.cloud.compute_v1.types.compute import NodeGroupsSimulateMaintenanceEventRequest +from google.cloud.compute_v1.types.compute import NodeTemplate +from google.cloud.compute_v1.types.compute import NodeTemplateAggregatedList +from google.cloud.compute_v1.types.compute import NodeTemplateList +from google.cloud.compute_v1.types.compute import NodeTemplateNodeTypeFlexibility +from google.cloud.compute_v1.types.compute import NodeTemplatesScopedList +from google.cloud.compute_v1.types.compute import NodeType +from google.cloud.compute_v1.types.compute import NodeTypeAggregatedList +from google.cloud.compute_v1.types.compute import NodeTypeList +from google.cloud.compute_v1.types.compute import NodeTypesScopedList +from google.cloud.compute_v1.types.compute import NotificationEndpoint +from google.cloud.compute_v1.types.compute import NotificationEndpointGrpcSettings +from google.cloud.compute_v1.types.compute import NotificationEndpointList +from google.cloud.compute_v1.types.compute import Operation +from google.cloud.compute_v1.types.compute import OperationAggregatedList +from google.cloud.compute_v1.types.compute import OperationList +from google.cloud.compute_v1.types.compute import OperationsScopedList +from google.cloud.compute_v1.types.compute import OutlierDetection +from google.cloud.compute_v1.types.compute import PacketIntervals +from 
google.cloud.compute_v1.types.compute import PacketMirroring +from google.cloud.compute_v1.types.compute import PacketMirroringAggregatedList +from google.cloud.compute_v1.types.compute import PacketMirroringFilter +from google.cloud.compute_v1.types.compute import PacketMirroringForwardingRuleInfo +from google.cloud.compute_v1.types.compute import PacketMirroringList +from google.cloud.compute_v1.types.compute import PacketMirroringMirroredResourceInfo +from google.cloud.compute_v1.types.compute import PacketMirroringMirroredResourceInfoInstanceInfo +from google.cloud.compute_v1.types.compute import PacketMirroringMirroredResourceInfoSubnetInfo +from google.cloud.compute_v1.types.compute import PacketMirroringNetworkInfo +from google.cloud.compute_v1.types.compute import PacketMirroringsScopedList +from google.cloud.compute_v1.types.compute import PatchAutoscalerRequest +from google.cloud.compute_v1.types.compute import PatchBackendBucketRequest +from google.cloud.compute_v1.types.compute import PatchBackendServiceRequest +from google.cloud.compute_v1.types.compute import PatchFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import PatchFirewallRequest +from google.cloud.compute_v1.types.compute import PatchForwardingRuleRequest +from google.cloud.compute_v1.types.compute import PatchGlobalForwardingRuleRequest +from google.cloud.compute_v1.types.compute import PatchGlobalPublicDelegatedPrefixeRequest +from google.cloud.compute_v1.types.compute import PatchHealthCheckRequest +from google.cloud.compute_v1.types.compute import PatchImageRequest +from google.cloud.compute_v1.types.compute import PatchInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import PatchInterconnectAttachmentRequest +from google.cloud.compute_v1.types.compute import PatchInterconnectRequest +from google.cloud.compute_v1.types.compute import PatchNetworkEdgeSecurityServiceRequest +from google.cloud.compute_v1.types.compute import 
PatchNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import PatchNetworkRequest +from google.cloud.compute_v1.types.compute import PatchNodeGroupRequest +from google.cloud.compute_v1.types.compute import PatchPacketMirroringRequest +from google.cloud.compute_v1.types.compute import PatchPerInstanceConfigsInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import PatchPerInstanceConfigsRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import PatchPublicAdvertisedPrefixeRequest +from google.cloud.compute_v1.types.compute import PatchPublicDelegatedPrefixeRequest +from google.cloud.compute_v1.types.compute import PatchRegionAutoscalerRequest +from google.cloud.compute_v1.types.compute import PatchRegionBackendServiceRequest +from google.cloud.compute_v1.types.compute import PatchRegionHealthCheckRequest +from google.cloud.compute_v1.types.compute import PatchRegionHealthCheckServiceRequest +from google.cloud.compute_v1.types.compute import PatchRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import PatchRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import PatchRegionSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import PatchRegionSslPolicyRequest +from google.cloud.compute_v1.types.compute import PatchRegionTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import PatchRegionUrlMapRequest +from google.cloud.compute_v1.types.compute import PatchResourcePolicyRequest +from google.cloud.compute_v1.types.compute import PatchRouterRequest +from google.cloud.compute_v1.types.compute import PatchRuleFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import PatchRuleNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import PatchRuleRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import PatchRuleSecurityPolicyRequest +from 
google.cloud.compute_v1.types.compute import PatchSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import PatchServiceAttachmentRequest +from google.cloud.compute_v1.types.compute import PatchSslPolicyRequest +from google.cloud.compute_v1.types.compute import PatchSubnetworkRequest +from google.cloud.compute_v1.types.compute import PatchTargetGrpcProxyRequest +from google.cloud.compute_v1.types.compute import PatchTargetHttpProxyRequest +from google.cloud.compute_v1.types.compute import PatchTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import PatchUrlMapRequest +from google.cloud.compute_v1.types.compute import PathMatcher +from google.cloud.compute_v1.types.compute import PathRule +from google.cloud.compute_v1.types.compute import PerInstanceConfig +from google.cloud.compute_v1.types.compute import Policy +from google.cloud.compute_v1.types.compute import PreconfiguredWafSet +from google.cloud.compute_v1.types.compute import PreservedState +from google.cloud.compute_v1.types.compute import PreservedStatePreservedDisk +from google.cloud.compute_v1.types.compute import PreviewRouterRequest +from google.cloud.compute_v1.types.compute import Project +from google.cloud.compute_v1.types.compute import ProjectsDisableXpnResourceRequest +from google.cloud.compute_v1.types.compute import ProjectsEnableXpnResourceRequest +from google.cloud.compute_v1.types.compute import ProjectsGetXpnResources +from google.cloud.compute_v1.types.compute import ProjectsListXpnHostsRequest +from google.cloud.compute_v1.types.compute import ProjectsSetDefaultNetworkTierRequest +from google.cloud.compute_v1.types.compute import PublicAdvertisedPrefix +from google.cloud.compute_v1.types.compute import PublicAdvertisedPrefixList +from google.cloud.compute_v1.types.compute import PublicAdvertisedPrefixPublicDelegatedPrefix +from google.cloud.compute_v1.types.compute import PublicDelegatedPrefix +from google.cloud.compute_v1.types.compute import 
PublicDelegatedPrefixAggregatedList +from google.cloud.compute_v1.types.compute import PublicDelegatedPrefixesScopedList +from google.cloud.compute_v1.types.compute import PublicDelegatedPrefixList +from google.cloud.compute_v1.types.compute import PublicDelegatedPrefixPublicDelegatedSubPrefix +from google.cloud.compute_v1.types.compute import Quota +from google.cloud.compute_v1.types.compute import QuotaExceededInfo +from google.cloud.compute_v1.types.compute import RawDisk +from google.cloud.compute_v1.types.compute import RecreateInstancesInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import RecreateInstancesRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import Reference +from google.cloud.compute_v1.types.compute import Region +from google.cloud.compute_v1.types.compute import RegionAddressesMoveRequest +from google.cloud.compute_v1.types.compute import RegionAutoscalerList +from google.cloud.compute_v1.types.compute import RegionDisksAddResourcePoliciesRequest +from google.cloud.compute_v1.types.compute import RegionDisksRemoveResourcePoliciesRequest +from google.cloud.compute_v1.types.compute import RegionDisksResizeRequest +from google.cloud.compute_v1.types.compute import RegionDisksStartAsyncReplicationRequest +from google.cloud.compute_v1.types.compute import RegionDiskTypeList +from google.cloud.compute_v1.types.compute import RegionInstanceGroupList +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagerDeleteInstanceConfigReq +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagerList +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagerPatchInstanceConfigReq +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersAbandonInstancesRequest +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersApplyUpdatesRequest +from google.cloud.compute_v1.types.compute import 
RegionInstanceGroupManagersCreateInstancesRequest +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersDeleteInstancesRequest +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersListErrorsResponse +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersListInstanceConfigsResp +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersListInstancesResponse +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersRecreateRequest +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersSetTargetPoolsRequest +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagersSetTemplateRequest +from google.cloud.compute_v1.types.compute import RegionInstanceGroupManagerUpdateInstanceConfigReq +from google.cloud.compute_v1.types.compute import RegionInstanceGroupsListInstances +from google.cloud.compute_v1.types.compute import RegionInstanceGroupsListInstancesRequest +from google.cloud.compute_v1.types.compute import RegionInstanceGroupsSetNamedPortsRequest +from google.cloud.compute_v1.types.compute import RegionList +from google.cloud.compute_v1.types.compute import RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse +from google.cloud.compute_v1.types.compute import RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy +from google.cloud.compute_v1.types.compute import RegionSetLabelsRequest +from google.cloud.compute_v1.types.compute import RegionSetPolicyRequest +from google.cloud.compute_v1.types.compute import RegionTargetHttpsProxiesSetSslCertificatesRequest +from google.cloud.compute_v1.types.compute import RegionUrlMapsValidateRequest +from google.cloud.compute_v1.types.compute import RemoveAssociationFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import RemoveAssociationNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import 
RemoveAssociationRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import RemoveHealthCheckTargetPoolRequest +from google.cloud.compute_v1.types.compute import RemoveInstancesInstanceGroupRequest +from google.cloud.compute_v1.types.compute import RemoveInstanceTargetPoolRequest +from google.cloud.compute_v1.types.compute import RemovePeeringNetworkRequest +from google.cloud.compute_v1.types.compute import RemoveResourcePoliciesDiskRequest +from google.cloud.compute_v1.types.compute import RemoveResourcePoliciesInstanceRequest +from google.cloud.compute_v1.types.compute import RemoveResourcePoliciesRegionDiskRequest +from google.cloud.compute_v1.types.compute import RemoveRuleFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import RemoveRuleNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import RemoveRuleRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import RemoveRuleSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import RequestMirrorPolicy +from google.cloud.compute_v1.types.compute import Reservation +from google.cloud.compute_v1.types.compute import ReservationAffinity +from google.cloud.compute_v1.types.compute import ReservationAggregatedList +from google.cloud.compute_v1.types.compute import ReservationList +from google.cloud.compute_v1.types.compute import ReservationsResizeRequest +from google.cloud.compute_v1.types.compute import ReservationsScopedList +from google.cloud.compute_v1.types.compute import ResetInstanceRequest +from google.cloud.compute_v1.types.compute import ResizeDiskRequest +from google.cloud.compute_v1.types.compute import ResizeInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import ResizeRegionDiskRequest +from google.cloud.compute_v1.types.compute import ResizeRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import ResizeReservationRequest +from 
google.cloud.compute_v1.types.compute import ResourceCommitment +from google.cloud.compute_v1.types.compute import ResourceGroupReference +from google.cloud.compute_v1.types.compute import ResourcePoliciesScopedList +from google.cloud.compute_v1.types.compute import ResourcePolicy +from google.cloud.compute_v1.types.compute import ResourcePolicyAggregatedList +from google.cloud.compute_v1.types.compute import ResourcePolicyDailyCycle +from google.cloud.compute_v1.types.compute import ResourcePolicyDiskConsistencyGroupPolicy +from google.cloud.compute_v1.types.compute import ResourcePolicyGroupPlacementPolicy +from google.cloud.compute_v1.types.compute import ResourcePolicyHourlyCycle +from google.cloud.compute_v1.types.compute import ResourcePolicyInstanceSchedulePolicy +from google.cloud.compute_v1.types.compute import ResourcePolicyInstanceSchedulePolicySchedule +from google.cloud.compute_v1.types.compute import ResourcePolicyList +from google.cloud.compute_v1.types.compute import ResourcePolicyResourceStatus +from google.cloud.compute_v1.types.compute import ResourcePolicyResourceStatusInstanceSchedulePolicyStatus +from google.cloud.compute_v1.types.compute import ResourcePolicySnapshotSchedulePolicy +from google.cloud.compute_v1.types.compute import ResourcePolicySnapshotSchedulePolicyRetentionPolicy +from google.cloud.compute_v1.types.compute import ResourcePolicySnapshotSchedulePolicySchedule +from google.cloud.compute_v1.types.compute import ResourcePolicySnapshotSchedulePolicySnapshotProperties +from google.cloud.compute_v1.types.compute import ResourcePolicyWeeklyCycle +from google.cloud.compute_v1.types.compute import ResourcePolicyWeeklyCycleDayOfWeek +from google.cloud.compute_v1.types.compute import ResourceStatus +from google.cloud.compute_v1.types.compute import ResumeInstanceRequest +from google.cloud.compute_v1.types.compute import Route +from google.cloud.compute_v1.types.compute import RouteAsPath +from google.cloud.compute_v1.types.compute 
import RouteList +from google.cloud.compute_v1.types.compute import Router +from google.cloud.compute_v1.types.compute import RouterAdvertisedIpRange +from google.cloud.compute_v1.types.compute import RouterAggregatedList +from google.cloud.compute_v1.types.compute import RouterBgp +from google.cloud.compute_v1.types.compute import RouterBgpPeer +from google.cloud.compute_v1.types.compute import RouterBgpPeerBfd +from google.cloud.compute_v1.types.compute import RouterBgpPeerCustomLearnedIpRange +from google.cloud.compute_v1.types.compute import RouterInterface +from google.cloud.compute_v1.types.compute import RouterList +from google.cloud.compute_v1.types.compute import RouterMd5AuthenticationKey +from google.cloud.compute_v1.types.compute import RouterNat +from google.cloud.compute_v1.types.compute import RouterNatLogConfig +from google.cloud.compute_v1.types.compute import RouterNatRule +from google.cloud.compute_v1.types.compute import RouterNatRuleAction +from google.cloud.compute_v1.types.compute import RouterNatSubnetworkToNat +from google.cloud.compute_v1.types.compute import RoutersPreviewResponse +from google.cloud.compute_v1.types.compute import RoutersScopedList +from google.cloud.compute_v1.types.compute import RouterStatus +from google.cloud.compute_v1.types.compute import RouterStatusBgpPeerStatus +from google.cloud.compute_v1.types.compute import RouterStatusNatStatus +from google.cloud.compute_v1.types.compute import RouterStatusNatStatusNatRuleStatus +from google.cloud.compute_v1.types.compute import RouterStatusResponse +from google.cloud.compute_v1.types.compute import Rule +from google.cloud.compute_v1.types.compute import SavedAttachedDisk +from google.cloud.compute_v1.types.compute import SavedDisk +from google.cloud.compute_v1.types.compute import ScalingScheduleStatus +from google.cloud.compute_v1.types.compute import Scheduling +from google.cloud.compute_v1.types.compute import SchedulingNodeAffinity +from 
google.cloud.compute_v1.types.compute import ScratchDisks +from google.cloud.compute_v1.types.compute import Screenshot +from google.cloud.compute_v1.types.compute import SecurityPoliciesAggregatedList +from google.cloud.compute_v1.types.compute import SecurityPoliciesListPreconfiguredExpressionSetsResponse +from google.cloud.compute_v1.types.compute import SecurityPoliciesScopedList +from google.cloud.compute_v1.types.compute import SecurityPoliciesWafConfig +from google.cloud.compute_v1.types.compute import SecurityPolicy +from google.cloud.compute_v1.types.compute import SecurityPolicyAdaptiveProtectionConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyAdvancedOptionsConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyAdvancedOptionsConfigJsonCustomConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyDdosProtectionConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyList +from google.cloud.compute_v1.types.compute import SecurityPolicyRecaptchaOptionsConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyReference +from google.cloud.compute_v1.types.compute import SecurityPolicyRule +from google.cloud.compute_v1.types.compute import SecurityPolicyRuleHttpHeaderAction +from google.cloud.compute_v1.types.compute import SecurityPolicyRuleHttpHeaderActionHttpHeaderOption +from google.cloud.compute_v1.types.compute import SecurityPolicyRuleMatcher +from google.cloud.compute_v1.types.compute import SecurityPolicyRuleMatcherConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyRulePreconfiguredWafConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyRulePreconfiguredWafConfigExclusion +from google.cloud.compute_v1.types.compute import SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams +from 
google.cloud.compute_v1.types.compute import SecurityPolicyRuleRateLimitOptions +from google.cloud.compute_v1.types.compute import SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig +from google.cloud.compute_v1.types.compute import SecurityPolicyRuleRateLimitOptionsThreshold +from google.cloud.compute_v1.types.compute import SecurityPolicyRuleRedirectOptions +from google.cloud.compute_v1.types.compute import SecuritySettings +from google.cloud.compute_v1.types.compute import SendDiagnosticInterruptInstanceRequest +from google.cloud.compute_v1.types.compute import SendDiagnosticInterruptInstanceResponse +from google.cloud.compute_v1.types.compute import SerialPortOutput +from google.cloud.compute_v1.types.compute import ServerBinding +from google.cloud.compute_v1.types.compute import ServiceAccount +from google.cloud.compute_v1.types.compute import ServiceAttachment +from google.cloud.compute_v1.types.compute import ServiceAttachmentAggregatedList +from google.cloud.compute_v1.types.compute import ServiceAttachmentConnectedEndpoint +from google.cloud.compute_v1.types.compute import ServiceAttachmentConsumerProjectLimit +from google.cloud.compute_v1.types.compute import ServiceAttachmentList +from google.cloud.compute_v1.types.compute import ServiceAttachmentsScopedList +from google.cloud.compute_v1.types.compute import SetBackendServiceTargetSslProxyRequest +from google.cloud.compute_v1.types.compute import SetBackendServiceTargetTcpProxyRequest +from google.cloud.compute_v1.types.compute import SetBackupTargetPoolRequest +from google.cloud.compute_v1.types.compute import SetCertificateMapTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import SetCertificateMapTargetSslProxyRequest +from google.cloud.compute_v1.types.compute import SetCommonInstanceMetadataProjectRequest +from google.cloud.compute_v1.types.compute import SetDefaultNetworkTierProjectRequest +from google.cloud.compute_v1.types.compute import SetDeletionProtectionInstanceRequest 
+from google.cloud.compute_v1.types.compute import SetDiskAutoDeleteInstanceRequest +from google.cloud.compute_v1.types.compute import SetEdgeSecurityPolicyBackendBucketRequest +from google.cloud.compute_v1.types.compute import SetEdgeSecurityPolicyBackendServiceRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyBackendServiceRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyDiskRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyImageRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyInstanceRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyLicenseRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyMachineImageRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyNetworkAttachmentRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyNodeGroupRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyNodeTemplateRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyRegionBackendServiceRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyRegionDiskRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyReservationRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyResourcePolicyRequest +from google.cloud.compute_v1.types.compute import SetIamPolicyServiceAttachmentRequest +from google.cloud.compute_v1.types.compute import SetIamPolicySnapshotRequest +from google.cloud.compute_v1.types.compute import SetIamPolicySubnetworkRequest +from google.cloud.compute_v1.types.compute import 
SetInstanceTemplateInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import SetInstanceTemplateRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import SetLabelsAddressRequest +from google.cloud.compute_v1.types.compute import SetLabelsDiskRequest +from google.cloud.compute_v1.types.compute import SetLabelsExternalVpnGatewayRequest +from google.cloud.compute_v1.types.compute import SetLabelsForwardingRuleRequest +from google.cloud.compute_v1.types.compute import SetLabelsGlobalAddressRequest +from google.cloud.compute_v1.types.compute import SetLabelsGlobalForwardingRuleRequest +from google.cloud.compute_v1.types.compute import SetLabelsImageRequest +from google.cloud.compute_v1.types.compute import SetLabelsInstanceRequest +from google.cloud.compute_v1.types.compute import SetLabelsInterconnectAttachmentRequest +from google.cloud.compute_v1.types.compute import SetLabelsInterconnectRequest +from google.cloud.compute_v1.types.compute import SetLabelsRegionDiskRequest +from google.cloud.compute_v1.types.compute import SetLabelsSecurityPolicyRequest +from google.cloud.compute_v1.types.compute import SetLabelsSnapshotRequest +from google.cloud.compute_v1.types.compute import SetLabelsTargetVpnGatewayRequest +from google.cloud.compute_v1.types.compute import SetLabelsVpnGatewayRequest +from google.cloud.compute_v1.types.compute import SetLabelsVpnTunnelRequest +from google.cloud.compute_v1.types.compute import SetMachineResourcesInstanceRequest +from google.cloud.compute_v1.types.compute import SetMachineTypeInstanceRequest +from google.cloud.compute_v1.types.compute import SetMetadataInstanceRequest +from google.cloud.compute_v1.types.compute import SetMinCpuPlatformInstanceRequest +from google.cloud.compute_v1.types.compute import SetNamedPortsInstanceGroupRequest +from google.cloud.compute_v1.types.compute import SetNamedPortsRegionInstanceGroupRequest +from google.cloud.compute_v1.types.compute import 
SetNameInstanceRequest +from google.cloud.compute_v1.types.compute import SetNodeTemplateNodeGroupRequest +from google.cloud.compute_v1.types.compute import SetPrivateIpGoogleAccessSubnetworkRequest +from google.cloud.compute_v1.types.compute import SetProxyHeaderTargetSslProxyRequest +from google.cloud.compute_v1.types.compute import SetProxyHeaderTargetTcpProxyRequest +from google.cloud.compute_v1.types.compute import SetQuicOverrideTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import SetSchedulingInstanceRequest +from google.cloud.compute_v1.types.compute import SetSecurityPolicyBackendServiceRequest +from google.cloud.compute_v1.types.compute import SetServiceAccountInstanceRequest +from google.cloud.compute_v1.types.compute import SetShieldedInstanceIntegrityPolicyInstanceRequest +from google.cloud.compute_v1.types.compute import SetSslCertificatesRegionTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import SetSslCertificatesTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import SetSslCertificatesTargetSslProxyRequest +from google.cloud.compute_v1.types.compute import SetSslPolicyTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import SetSslPolicyTargetSslProxyRequest +from google.cloud.compute_v1.types.compute import SetTagsInstanceRequest +from google.cloud.compute_v1.types.compute import SetTargetForwardingRuleRequest +from google.cloud.compute_v1.types.compute import SetTargetGlobalForwardingRuleRequest +from google.cloud.compute_v1.types.compute import SetTargetPoolsInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import SetTargetPoolsRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import SetUrlMapRegionTargetHttpProxyRequest +from google.cloud.compute_v1.types.compute import SetUrlMapRegionTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import SetUrlMapTargetHttpProxyRequest +from 
google.cloud.compute_v1.types.compute import SetUrlMapTargetHttpsProxyRequest +from google.cloud.compute_v1.types.compute import SetUsageExportBucketProjectRequest +from google.cloud.compute_v1.types.compute import ShareSettings +from google.cloud.compute_v1.types.compute import ShareSettingsProjectConfig +from google.cloud.compute_v1.types.compute import ShieldedInstanceConfig +from google.cloud.compute_v1.types.compute import ShieldedInstanceIdentity +from google.cloud.compute_v1.types.compute import ShieldedInstanceIdentityEntry +from google.cloud.compute_v1.types.compute import ShieldedInstanceIntegrityPolicy +from google.cloud.compute_v1.types.compute import SignedUrlKey +from google.cloud.compute_v1.types.compute import SimulateMaintenanceEventInstanceRequest +from google.cloud.compute_v1.types.compute import SimulateMaintenanceEventNodeGroupRequest +from google.cloud.compute_v1.types.compute import Snapshot +from google.cloud.compute_v1.types.compute import SnapshotList +from google.cloud.compute_v1.types.compute import SourceDiskEncryptionKey +from google.cloud.compute_v1.types.compute import SourceInstanceParams +from google.cloud.compute_v1.types.compute import SourceInstanceProperties +from google.cloud.compute_v1.types.compute import SslCertificate +from google.cloud.compute_v1.types.compute import SslCertificateAggregatedList +from google.cloud.compute_v1.types.compute import SslCertificateList +from google.cloud.compute_v1.types.compute import SslCertificateManagedSslCertificate +from google.cloud.compute_v1.types.compute import SslCertificateSelfManagedSslCertificate +from google.cloud.compute_v1.types.compute import SslCertificatesScopedList +from google.cloud.compute_v1.types.compute import SSLHealthCheck +from google.cloud.compute_v1.types.compute import SslPoliciesAggregatedList +from google.cloud.compute_v1.types.compute import SslPoliciesList +from google.cloud.compute_v1.types.compute import SslPoliciesListAvailableFeaturesResponse +from 
google.cloud.compute_v1.types.compute import SslPoliciesScopedList +from google.cloud.compute_v1.types.compute import SslPolicy +from google.cloud.compute_v1.types.compute import SslPolicyReference +from google.cloud.compute_v1.types.compute import StartAsyncReplicationDiskRequest +from google.cloud.compute_v1.types.compute import StartAsyncReplicationRegionDiskRequest +from google.cloud.compute_v1.types.compute import StartInstanceRequest +from google.cloud.compute_v1.types.compute import StartWithEncryptionKeyInstanceRequest +from google.cloud.compute_v1.types.compute import StatefulPolicy +from google.cloud.compute_v1.types.compute import StatefulPolicyPreservedState +from google.cloud.compute_v1.types.compute import StatefulPolicyPreservedStateDiskDevice +from google.cloud.compute_v1.types.compute import StopAsyncReplicationDiskRequest +from google.cloud.compute_v1.types.compute import StopAsyncReplicationRegionDiskRequest +from google.cloud.compute_v1.types.compute import StopGroupAsyncReplicationDiskRequest +from google.cloud.compute_v1.types.compute import StopGroupAsyncReplicationRegionDiskRequest +from google.cloud.compute_v1.types.compute import StopInstanceRequest +from google.cloud.compute_v1.types.compute import Subnetwork +from google.cloud.compute_v1.types.compute import SubnetworkAggregatedList +from google.cloud.compute_v1.types.compute import SubnetworkList +from google.cloud.compute_v1.types.compute import SubnetworkLogConfig +from google.cloud.compute_v1.types.compute import SubnetworkSecondaryRange +from google.cloud.compute_v1.types.compute import SubnetworksExpandIpCidrRangeRequest +from google.cloud.compute_v1.types.compute import SubnetworksScopedList +from google.cloud.compute_v1.types.compute import SubnetworksSetPrivateIpGoogleAccessRequest +from google.cloud.compute_v1.types.compute import Subsetting +from google.cloud.compute_v1.types.compute import SuspendInstanceRequest +from google.cloud.compute_v1.types.compute import 
SwitchToCustomModeNetworkRequest +from google.cloud.compute_v1.types.compute import Tags +from google.cloud.compute_v1.types.compute import TargetGrpcProxy +from google.cloud.compute_v1.types.compute import TargetGrpcProxyList +from google.cloud.compute_v1.types.compute import TargetHttpProxiesScopedList +from google.cloud.compute_v1.types.compute import TargetHttpProxy +from google.cloud.compute_v1.types.compute import TargetHttpProxyAggregatedList +from google.cloud.compute_v1.types.compute import TargetHttpProxyList +from google.cloud.compute_v1.types.compute import TargetHttpsProxiesScopedList +from google.cloud.compute_v1.types.compute import TargetHttpsProxiesSetCertificateMapRequest +from google.cloud.compute_v1.types.compute import TargetHttpsProxiesSetQuicOverrideRequest +from google.cloud.compute_v1.types.compute import TargetHttpsProxiesSetSslCertificatesRequest +from google.cloud.compute_v1.types.compute import TargetHttpsProxy +from google.cloud.compute_v1.types.compute import TargetHttpsProxyAggregatedList +from google.cloud.compute_v1.types.compute import TargetHttpsProxyList +from google.cloud.compute_v1.types.compute import TargetInstance +from google.cloud.compute_v1.types.compute import TargetInstanceAggregatedList +from google.cloud.compute_v1.types.compute import TargetInstanceList +from google.cloud.compute_v1.types.compute import TargetInstancesScopedList +from google.cloud.compute_v1.types.compute import TargetPool +from google.cloud.compute_v1.types.compute import TargetPoolAggregatedList +from google.cloud.compute_v1.types.compute import TargetPoolInstanceHealth +from google.cloud.compute_v1.types.compute import TargetPoolList +from google.cloud.compute_v1.types.compute import TargetPoolsAddHealthCheckRequest +from google.cloud.compute_v1.types.compute import TargetPoolsAddInstanceRequest +from google.cloud.compute_v1.types.compute import TargetPoolsRemoveHealthCheckRequest +from google.cloud.compute_v1.types.compute import 
TargetPoolsRemoveInstanceRequest +from google.cloud.compute_v1.types.compute import TargetPoolsScopedList +from google.cloud.compute_v1.types.compute import TargetReference +from google.cloud.compute_v1.types.compute import TargetSslProxiesSetBackendServiceRequest +from google.cloud.compute_v1.types.compute import TargetSslProxiesSetCertificateMapRequest +from google.cloud.compute_v1.types.compute import TargetSslProxiesSetProxyHeaderRequest +from google.cloud.compute_v1.types.compute import TargetSslProxiesSetSslCertificatesRequest +from google.cloud.compute_v1.types.compute import TargetSslProxy +from google.cloud.compute_v1.types.compute import TargetSslProxyList +from google.cloud.compute_v1.types.compute import TargetTcpProxiesScopedList +from google.cloud.compute_v1.types.compute import TargetTcpProxiesSetBackendServiceRequest +from google.cloud.compute_v1.types.compute import TargetTcpProxiesSetProxyHeaderRequest +from google.cloud.compute_v1.types.compute import TargetTcpProxy +from google.cloud.compute_v1.types.compute import TargetTcpProxyAggregatedList +from google.cloud.compute_v1.types.compute import TargetTcpProxyList +from google.cloud.compute_v1.types.compute import TargetVpnGateway +from google.cloud.compute_v1.types.compute import TargetVpnGatewayAggregatedList +from google.cloud.compute_v1.types.compute import TargetVpnGatewayList +from google.cloud.compute_v1.types.compute import TargetVpnGatewaysScopedList +from google.cloud.compute_v1.types.compute import TCPHealthCheck +from google.cloud.compute_v1.types.compute import TestFailure +from google.cloud.compute_v1.types.compute import TestIamPermissionsDiskRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsExternalVpnGatewayRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsImageRequest +from google.cloud.compute_v1.types.compute import 
TestIamPermissionsInstanceRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsInstanceTemplateRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsLicenseCodeRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsLicenseRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsMachineImageRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsNetworkAttachmentRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsNetworkEndpointGroupRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsNodeGroupRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsNodeTemplateRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsPacketMirroringRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsRegionDiskRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsRegionNetworkFirewallPolicyRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsReservationRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsResourcePolicyRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsServiceAttachmentRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsSnapshotRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsSubnetworkRequest +from google.cloud.compute_v1.types.compute import TestIamPermissionsVpnGatewayRequest +from google.cloud.compute_v1.types.compute import TestPermissionsRequest +from google.cloud.compute_v1.types.compute import TestPermissionsResponse +from google.cloud.compute_v1.types.compute import Uint128 +from google.cloud.compute_v1.types.compute import UpdateAccessConfigInstanceRequest +from 
google.cloud.compute_v1.types.compute import UpdateAutoscalerRequest +from google.cloud.compute_v1.types.compute import UpdateBackendBucketRequest +from google.cloud.compute_v1.types.compute import UpdateBackendServiceRequest +from google.cloud.compute_v1.types.compute import UpdateDiskRequest +from google.cloud.compute_v1.types.compute import UpdateDisplayDeviceInstanceRequest +from google.cloud.compute_v1.types.compute import UpdateFirewallRequest +from google.cloud.compute_v1.types.compute import UpdateHealthCheckRequest +from google.cloud.compute_v1.types.compute import UpdateInstanceRequest +from google.cloud.compute_v1.types.compute import UpdateNetworkInterfaceInstanceRequest +from google.cloud.compute_v1.types.compute import UpdatePeeringNetworkRequest +from google.cloud.compute_v1.types.compute import UpdatePerInstanceConfigsInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest +from google.cloud.compute_v1.types.compute import UpdateRegionAutoscalerRequest +from google.cloud.compute_v1.types.compute import UpdateRegionBackendServiceRequest +from google.cloud.compute_v1.types.compute import UpdateRegionCommitmentRequest +from google.cloud.compute_v1.types.compute import UpdateRegionDiskRequest +from google.cloud.compute_v1.types.compute import UpdateRegionHealthCheckRequest +from google.cloud.compute_v1.types.compute import UpdateRegionUrlMapRequest +from google.cloud.compute_v1.types.compute import UpdateReservationRequest +from google.cloud.compute_v1.types.compute import UpdateRouterRequest +from google.cloud.compute_v1.types.compute import UpdateShieldedInstanceConfigInstanceRequest +from google.cloud.compute_v1.types.compute import UpdateUrlMapRequest +from google.cloud.compute_v1.types.compute import UrlMap +from google.cloud.compute_v1.types.compute import UrlMapList +from google.cloud.compute_v1.types.compute import UrlMapReference +from 
google.cloud.compute_v1.types.compute import UrlMapsAggregatedList +from google.cloud.compute_v1.types.compute import UrlMapsScopedList +from google.cloud.compute_v1.types.compute import UrlMapsValidateRequest +from google.cloud.compute_v1.types.compute import UrlMapsValidateResponse +from google.cloud.compute_v1.types.compute import UrlMapTest +from google.cloud.compute_v1.types.compute import UrlMapTestHeader +from google.cloud.compute_v1.types.compute import UrlMapValidationResult +from google.cloud.compute_v1.types.compute import UrlRewrite +from google.cloud.compute_v1.types.compute import UsableSubnetwork +from google.cloud.compute_v1.types.compute import UsableSubnetworksAggregatedList +from google.cloud.compute_v1.types.compute import UsableSubnetworkSecondaryRange +from google.cloud.compute_v1.types.compute import UsageExportLocation +from google.cloud.compute_v1.types.compute import ValidateRegionUrlMapRequest +from google.cloud.compute_v1.types.compute import ValidateUrlMapRequest +from google.cloud.compute_v1.types.compute import VmEndpointNatMappings +from google.cloud.compute_v1.types.compute import VmEndpointNatMappingsInterfaceNatMappings +from google.cloud.compute_v1.types.compute import VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings +from google.cloud.compute_v1.types.compute import VmEndpointNatMappingsList +from google.cloud.compute_v1.types.compute import VpnGateway +from google.cloud.compute_v1.types.compute import VpnGatewayAggregatedList +from google.cloud.compute_v1.types.compute import VpnGatewayList +from google.cloud.compute_v1.types.compute import VpnGatewaysGetStatusResponse +from google.cloud.compute_v1.types.compute import VpnGatewaysScopedList +from google.cloud.compute_v1.types.compute import VpnGatewayStatus +from google.cloud.compute_v1.types.compute import VpnGatewayStatusHighAvailabilityRequirementState +from google.cloud.compute_v1.types.compute import VpnGatewayStatusTunnel +from 
google.cloud.compute_v1.types.compute import VpnGatewayStatusVpnConnection +from google.cloud.compute_v1.types.compute import VpnGatewayVpnGatewayInterface +from google.cloud.compute_v1.types.compute import VpnTunnel +from google.cloud.compute_v1.types.compute import VpnTunnelAggregatedList +from google.cloud.compute_v1.types.compute import VpnTunnelList +from google.cloud.compute_v1.types.compute import VpnTunnelsScopedList +from google.cloud.compute_v1.types.compute import WafExpressionSet +from google.cloud.compute_v1.types.compute import WafExpressionSetExpression +from google.cloud.compute_v1.types.compute import WaitGlobalOperationRequest +from google.cloud.compute_v1.types.compute import WaitRegionOperationRequest +from google.cloud.compute_v1.types.compute import WaitZoneOperationRequest +from google.cloud.compute_v1.types.compute import Warning +from google.cloud.compute_v1.types.compute import Warnings +from google.cloud.compute_v1.types.compute import WeightedBackendService +from google.cloud.compute_v1.types.compute import XpnHostList +from google.cloud.compute_v1.types.compute import XpnResourceId +from google.cloud.compute_v1.types.compute import Zone +from google.cloud.compute_v1.types.compute import ZoneList +from google.cloud.compute_v1.types.compute import ZoneSetLabelsRequest +from google.cloud.compute_v1.types.compute import ZoneSetPolicyRequest + +__all__ = ('AcceleratorTypesClient', + 'AddressesClient', + 'AutoscalersClient', + 'BackendBucketsClient', + 'BackendServicesClient', + 'DisksClient', + 'DiskTypesClient', + 'ExternalVpnGatewaysClient', + 'FirewallPoliciesClient', + 'FirewallsClient', + 'ForwardingRulesClient', + 'GlobalAddressesClient', + 'GlobalForwardingRulesClient', + 'GlobalNetworkEndpointGroupsClient', + 'GlobalOperationsClient', + 'GlobalOrganizationOperationsClient', + 'GlobalPublicDelegatedPrefixesClient', + 'HealthChecksClient', + 'ImageFamilyViewsClient', + 'ImagesClient', + 'InstanceGroupManagersClient', + 
'InstanceGroupsClient', + 'InstancesClient', + 'InstanceTemplatesClient', + 'InterconnectAttachmentsClient', + 'InterconnectLocationsClient', + 'InterconnectRemoteLocationsClient', + 'InterconnectsClient', + 'LicenseCodesClient', + 'LicensesClient', + 'MachineImagesClient', + 'MachineTypesClient', + 'NetworkAttachmentsClient', + 'NetworkEdgeSecurityServicesClient', + 'NetworkEndpointGroupsClient', + 'NetworkFirewallPoliciesClient', + 'NetworksClient', + 'NodeGroupsClient', + 'NodeTemplatesClient', + 'NodeTypesClient', + 'PacketMirroringsClient', + 'ProjectsClient', + 'PublicAdvertisedPrefixesClient', + 'PublicDelegatedPrefixesClient', + 'RegionAutoscalersClient', + 'RegionBackendServicesClient', + 'RegionCommitmentsClient', + 'RegionDisksClient', + 'RegionDiskTypesClient', + 'RegionHealthChecksClient', + 'RegionHealthCheckServicesClient', + 'RegionInstanceGroupManagersClient', + 'RegionInstanceGroupsClient', + 'RegionInstancesClient', + 'RegionInstanceTemplatesClient', + 'RegionNetworkEndpointGroupsClient', + 'RegionNetworkFirewallPoliciesClient', + 'RegionNotificationEndpointsClient', + 'RegionOperationsClient', + 'RegionsClient', + 'RegionSecurityPoliciesClient', + 'RegionSslCertificatesClient', + 'RegionSslPoliciesClient', + 'RegionTargetHttpProxiesClient', + 'RegionTargetHttpsProxiesClient', + 'RegionTargetTcpProxiesClient', + 'RegionUrlMapsClient', + 'ReservationsClient', + 'ResourcePoliciesClient', + 'RoutersClient', + 'RoutesClient', + 'SecurityPoliciesClient', + 'ServiceAttachmentsClient', + 'SnapshotsClient', + 'SslCertificatesClient', + 'SslPoliciesClient', + 'SubnetworksClient', + 'TargetGrpcProxiesClient', + 'TargetHttpProxiesClient', + 'TargetHttpsProxiesClient', + 'TargetInstancesClient', + 'TargetPoolsClient', + 'TargetSslProxiesClient', + 'TargetTcpProxiesClient', + 'TargetVpnGatewaysClient', + 'UrlMapsClient', + 'VpnGatewaysClient', + 'VpnTunnelsClient', + 'ZoneOperationsClient', + 'ZonesClient', + 'AbandonInstancesInstanceGroupManagerRequest', + 
'AbandonInstancesRegionInstanceGroupManagerRequest', + 'AcceleratorConfig', + 'Accelerators', + 'AcceleratorType', + 'AcceleratorTypeAggregatedList', + 'AcceleratorTypeList', + 'AcceleratorTypesScopedList', + 'AccessConfig', + 'AddAccessConfigInstanceRequest', + 'AddAssociationFirewallPolicyRequest', + 'AddAssociationNetworkFirewallPolicyRequest', + 'AddAssociationRegionNetworkFirewallPolicyRequest', + 'AddHealthCheckTargetPoolRequest', + 'AddInstancesInstanceGroupRequest', + 'AddInstanceTargetPoolRequest', + 'AddNodesNodeGroupRequest', + 'AddPeeringNetworkRequest', + 'AddResourcePoliciesDiskRequest', + 'AddResourcePoliciesInstanceRequest', + 'AddResourcePoliciesRegionDiskRequest', + 'Address', + 'AddressAggregatedList', + 'AddressesScopedList', + 'AddressList', + 'AddRuleFirewallPolicyRequest', + 'AddRuleNetworkFirewallPolicyRequest', + 'AddRuleRegionNetworkFirewallPolicyRequest', + 'AddRuleSecurityPolicyRequest', + 'AddSignedUrlKeyBackendBucketRequest', + 'AddSignedUrlKeyBackendServiceRequest', + 'AdvancedMachineFeatures', + 'AggregatedListAcceleratorTypesRequest', + 'AggregatedListAddressesRequest', + 'AggregatedListAutoscalersRequest', + 'AggregatedListBackendServicesRequest', + 'AggregatedListDisksRequest', + 'AggregatedListDiskTypesRequest', + 'AggregatedListForwardingRulesRequest', + 'AggregatedListGlobalOperationsRequest', + 'AggregatedListHealthChecksRequest', + 'AggregatedListInstanceGroupManagersRequest', + 'AggregatedListInstanceGroupsRequest', + 'AggregatedListInstancesRequest', + 'AggregatedListInstanceTemplatesRequest', + 'AggregatedListInterconnectAttachmentsRequest', + 'AggregatedListMachineTypesRequest', + 'AggregatedListNetworkAttachmentsRequest', + 'AggregatedListNetworkEdgeSecurityServicesRequest', + 'AggregatedListNetworkEndpointGroupsRequest', + 'AggregatedListNodeGroupsRequest', + 'AggregatedListNodeTemplatesRequest', + 'AggregatedListNodeTypesRequest', + 'AggregatedListPacketMirroringsRequest', + 
'AggregatedListPublicDelegatedPrefixesRequest', + 'AggregatedListRegionCommitmentsRequest', + 'AggregatedListReservationsRequest', + 'AggregatedListResourcePoliciesRequest', + 'AggregatedListRoutersRequest', + 'AggregatedListSecurityPoliciesRequest', + 'AggregatedListServiceAttachmentsRequest', + 'AggregatedListSslCertificatesRequest', + 'AggregatedListSslPoliciesRequest', + 'AggregatedListSubnetworksRequest', + 'AggregatedListTargetHttpProxiesRequest', + 'AggregatedListTargetHttpsProxiesRequest', + 'AggregatedListTargetInstancesRequest', + 'AggregatedListTargetPoolsRequest', + 'AggregatedListTargetTcpProxiesRequest', + 'AggregatedListTargetVpnGatewaysRequest', + 'AggregatedListUrlMapsRequest', + 'AggregatedListVpnGatewaysRequest', + 'AggregatedListVpnTunnelsRequest', + 'AliasIpRange', + 'AllocationResourceStatus', + 'AllocationResourceStatusSpecificSKUAllocation', + 'AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk', + 'AllocationSpecificSKUAllocationReservedInstanceProperties', + 'AllocationSpecificSKUReservation', + 'Allowed', + 'ApplyUpdatesToInstancesInstanceGroupManagerRequest', + 'ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest', + 'AttachDiskInstanceRequest', + 'AttachedDisk', + 'AttachedDiskInitializeParams', + 'AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest', + 'AttachNetworkEndpointsNetworkEndpointGroupRequest', + 'AuditConfig', + 'AuditLogConfig', + 'AuthorizationLoggingOptions', + 'Autoscaler', + 'AutoscalerAggregatedList', + 'AutoscalerList', + 'AutoscalersScopedList', + 'AutoscalerStatusDetails', + 'AutoscalingPolicy', + 'AutoscalingPolicyCpuUtilization', + 'AutoscalingPolicyCustomMetricUtilization', + 'AutoscalingPolicyLoadBalancingUtilization', + 'AutoscalingPolicyScaleInControl', + 'AutoscalingPolicyScalingSchedule', + 'Backend', + 'BackendBucket', + 'BackendBucketCdnPolicy', + 'BackendBucketCdnPolicyBypassCacheOnRequestHeader', + 'BackendBucketCdnPolicyCacheKeyPolicy', + 
'BackendBucketCdnPolicyNegativeCachingPolicy', + 'BackendBucketList', + 'BackendService', + 'BackendServiceAggregatedList', + 'BackendServiceCdnPolicy', + 'BackendServiceCdnPolicyBypassCacheOnRequestHeader', + 'BackendServiceCdnPolicyNegativeCachingPolicy', + 'BackendServiceConnectionTrackingPolicy', + 'BackendServiceFailoverPolicy', + 'BackendServiceGroupHealth', + 'BackendServiceIAP', + 'BackendServiceList', + 'BackendServiceLocalityLoadBalancingPolicyConfig', + 'BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy', + 'BackendServiceLocalityLoadBalancingPolicyConfigPolicy', + 'BackendServiceLogConfig', + 'BackendServiceReference', + 'BackendServicesScopedList', + 'BfdPacket', + 'BfdStatus', + 'BfdStatusPacketCounts', + 'Binding', + 'BulkInsertDiskRequest', + 'BulkInsertDiskResource', + 'BulkInsertInstanceRequest', + 'BulkInsertInstanceResource', + 'BulkInsertInstanceResourcePerInstanceProperties', + 'BulkInsertRegionDiskRequest', + 'BulkInsertRegionInstanceRequest', + 'CacheInvalidationRule', + 'CacheKeyPolicy', + 'CircuitBreakers', + 'CloneRulesFirewallPolicyRequest', + 'CloneRulesNetworkFirewallPolicyRequest', + 'CloneRulesRegionNetworkFirewallPolicyRequest', + 'Commitment', + 'CommitmentAggregatedList', + 'CommitmentList', + 'CommitmentsScopedList', + 'Condition', + 'ConfidentialInstanceConfig', + 'ConnectionDraining', + 'ConsistentHashLoadBalancerSettings', + 'ConsistentHashLoadBalancerSettingsHttpCookie', + 'CorsPolicy', + 'CreateInstancesInstanceGroupManagerRequest', + 'CreateInstancesRegionInstanceGroupManagerRequest', + 'CreateSnapshotDiskRequest', + 'CreateSnapshotRegionDiskRequest', + 'CustomerEncryptionKey', + 'CustomerEncryptionKeyProtectedDisk', + 'Data', + 'DeleteAccessConfigInstanceRequest', + 'DeleteAddressRequest', + 'DeleteAutoscalerRequest', + 'DeleteBackendBucketRequest', + 'DeleteBackendServiceRequest', + 'DeleteDiskRequest', + 'DeleteExternalVpnGatewayRequest', + 'DeleteFirewallPolicyRequest', + 'DeleteFirewallRequest', + 
'DeleteForwardingRuleRequest', + 'DeleteGlobalAddressRequest', + 'DeleteGlobalForwardingRuleRequest', + 'DeleteGlobalNetworkEndpointGroupRequest', + 'DeleteGlobalOperationRequest', + 'DeleteGlobalOperationResponse', + 'DeleteGlobalOrganizationOperationRequest', + 'DeleteGlobalOrganizationOperationResponse', + 'DeleteGlobalPublicDelegatedPrefixeRequest', + 'DeleteHealthCheckRequest', + 'DeleteImageRequest', + 'DeleteInstanceGroupManagerRequest', + 'DeleteInstanceGroupRequest', + 'DeleteInstanceRequest', + 'DeleteInstancesInstanceGroupManagerRequest', + 'DeleteInstancesRegionInstanceGroupManagerRequest', + 'DeleteInstanceTemplateRequest', + 'DeleteInterconnectAttachmentRequest', + 'DeleteInterconnectRequest', + 'DeleteLicenseRequest', + 'DeleteMachineImageRequest', + 'DeleteNetworkAttachmentRequest', + 'DeleteNetworkEdgeSecurityServiceRequest', + 'DeleteNetworkEndpointGroupRequest', + 'DeleteNetworkFirewallPolicyRequest', + 'DeleteNetworkRequest', + 'DeleteNodeGroupRequest', + 'DeleteNodesNodeGroupRequest', + 'DeleteNodeTemplateRequest', + 'DeletePacketMirroringRequest', + 'DeletePerInstanceConfigsInstanceGroupManagerRequest', + 'DeletePerInstanceConfigsRegionInstanceGroupManagerRequest', + 'DeletePublicAdvertisedPrefixeRequest', + 'DeletePublicDelegatedPrefixeRequest', + 'DeleteRegionAutoscalerRequest', + 'DeleteRegionBackendServiceRequest', + 'DeleteRegionDiskRequest', + 'DeleteRegionHealthCheckRequest', + 'DeleteRegionHealthCheckServiceRequest', + 'DeleteRegionInstanceGroupManagerRequest', + 'DeleteRegionInstanceTemplateRequest', + 'DeleteRegionNetworkEndpointGroupRequest', + 'DeleteRegionNetworkFirewallPolicyRequest', + 'DeleteRegionNotificationEndpointRequest', + 'DeleteRegionOperationRequest', + 'DeleteRegionOperationResponse', + 'DeleteRegionSecurityPolicyRequest', + 'DeleteRegionSslCertificateRequest', + 'DeleteRegionSslPolicyRequest', + 'DeleteRegionTargetHttpProxyRequest', + 'DeleteRegionTargetHttpsProxyRequest', + 'DeleteRegionTargetTcpProxyRequest', + 
'DeleteRegionUrlMapRequest', + 'DeleteReservationRequest', + 'DeleteResourcePolicyRequest', + 'DeleteRouteRequest', + 'DeleteRouterRequest', + 'DeleteSecurityPolicyRequest', + 'DeleteServiceAttachmentRequest', + 'DeleteSignedUrlKeyBackendBucketRequest', + 'DeleteSignedUrlKeyBackendServiceRequest', + 'DeleteSnapshotRequest', + 'DeleteSslCertificateRequest', + 'DeleteSslPolicyRequest', + 'DeleteSubnetworkRequest', + 'DeleteTargetGrpcProxyRequest', + 'DeleteTargetHttpProxyRequest', + 'DeleteTargetHttpsProxyRequest', + 'DeleteTargetInstanceRequest', + 'DeleteTargetPoolRequest', + 'DeleteTargetSslProxyRequest', + 'DeleteTargetTcpProxyRequest', + 'DeleteTargetVpnGatewayRequest', + 'DeleteUrlMapRequest', + 'DeleteVpnGatewayRequest', + 'DeleteVpnTunnelRequest', + 'DeleteZoneOperationRequest', + 'DeleteZoneOperationResponse', + 'Denied', + 'DeprecateImageRequest', + 'DeprecationStatus', + 'DetachDiskInstanceRequest', + 'DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest', + 'DetachNetworkEndpointsNetworkEndpointGroupRequest', + 'DisableXpnHostProjectRequest', + 'DisableXpnResourceProjectRequest', + 'Disk', + 'DiskAggregatedList', + 'DiskAsyncReplication', + 'DiskAsyncReplicationList', + 'DiskInstantiationConfig', + 'DiskList', + 'DiskMoveRequest', + 'DiskParams', + 'DiskResourceStatus', + 'DiskResourceStatusAsyncReplicationStatus', + 'DisksAddResourcePoliciesRequest', + 'DisksRemoveResourcePoliciesRequest', + 'DisksResizeRequest', + 'DisksScopedList', + 'DisksStartAsyncReplicationRequest', + 'DisksStopGroupAsyncReplicationResource', + 'DiskType', + 'DiskTypeAggregatedList', + 'DiskTypeList', + 'DiskTypesScopedList', + 'DisplayDevice', + 'DistributionPolicy', + 'DistributionPolicyZoneConfiguration', + 'Duration', + 'EnableXpnHostProjectRequest', + 'EnableXpnResourceProjectRequest', + 'Error', + 'ErrorDetails', + 'ErrorInfo', + 'Errors', + 'ExchangedPeeringRoute', + 'ExchangedPeeringRoutesList', + 'ExpandIpCidrRangeSubnetworkRequest', + 'Expr', + 'ExternalVpnGateway', + 
'ExternalVpnGatewayInterface', + 'ExternalVpnGatewayList', + 'FileContentBuffer', + 'Firewall', + 'FirewallList', + 'FirewallLogConfig', + 'FirewallPoliciesListAssociationsResponse', + 'FirewallPolicy', + 'FirewallPolicyAssociation', + 'FirewallPolicyList', + 'FirewallPolicyRule', + 'FirewallPolicyRuleMatcher', + 'FirewallPolicyRuleMatcherLayer4Config', + 'FirewallPolicyRuleSecureTag', + 'FixedOrPercent', + 'ForwardingRule', + 'ForwardingRuleAggregatedList', + 'ForwardingRuleList', + 'ForwardingRuleReference', + 'ForwardingRuleServiceDirectoryRegistration', + 'ForwardingRulesScopedList', + 'GetAcceleratorTypeRequest', + 'GetAddressRequest', + 'GetAssociationFirewallPolicyRequest', + 'GetAssociationNetworkFirewallPolicyRequest', + 'GetAssociationRegionNetworkFirewallPolicyRequest', + 'GetAutoscalerRequest', + 'GetBackendBucketRequest', + 'GetBackendServiceRequest', + 'GetDiagnosticsInterconnectRequest', + 'GetDiskRequest', + 'GetDiskTypeRequest', + 'GetEffectiveFirewallsInstanceRequest', + 'GetEffectiveFirewallsNetworkRequest', + 'GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest', + 'GetExternalVpnGatewayRequest', + 'GetFirewallPolicyRequest', + 'GetFirewallRequest', + 'GetForwardingRuleRequest', + 'GetFromFamilyImageRequest', + 'GetGlobalAddressRequest', + 'GetGlobalForwardingRuleRequest', + 'GetGlobalNetworkEndpointGroupRequest', + 'GetGlobalOperationRequest', + 'GetGlobalOrganizationOperationRequest', + 'GetGlobalPublicDelegatedPrefixeRequest', + 'GetGuestAttributesInstanceRequest', + 'GetHealthBackendServiceRequest', + 'GetHealthCheckRequest', + 'GetHealthRegionBackendServiceRequest', + 'GetHealthTargetPoolRequest', + 'GetIamPolicyBackendServiceRequest', + 'GetIamPolicyDiskRequest', + 'GetIamPolicyFirewallPolicyRequest', + 'GetIamPolicyImageRequest', + 'GetIamPolicyInstanceRequest', + 'GetIamPolicyInstanceTemplateRequest', + 'GetIamPolicyLicenseRequest', + 'GetIamPolicyMachineImageRequest', + 'GetIamPolicyNetworkAttachmentRequest', + 
'GetIamPolicyNetworkFirewallPolicyRequest', + 'GetIamPolicyNodeGroupRequest', + 'GetIamPolicyNodeTemplateRequest', + 'GetIamPolicyRegionBackendServiceRequest', + 'GetIamPolicyRegionDiskRequest', + 'GetIamPolicyRegionNetworkFirewallPolicyRequest', + 'GetIamPolicyReservationRequest', + 'GetIamPolicyResourcePolicyRequest', + 'GetIamPolicyServiceAttachmentRequest', + 'GetIamPolicySnapshotRequest', + 'GetIamPolicySubnetworkRequest', + 'GetImageFamilyViewRequest', + 'GetImageRequest', + 'GetInstanceGroupManagerRequest', + 'GetInstanceGroupRequest', + 'GetInstanceRequest', + 'GetInstanceTemplateRequest', + 'GetInterconnectAttachmentRequest', + 'GetInterconnectLocationRequest', + 'GetInterconnectRemoteLocationRequest', + 'GetInterconnectRequest', + 'GetLicenseCodeRequest', + 'GetLicenseRequest', + 'GetMachineImageRequest', + 'GetMachineTypeRequest', + 'GetNatMappingInfoRoutersRequest', + 'GetNetworkAttachmentRequest', + 'GetNetworkEdgeSecurityServiceRequest', + 'GetNetworkEndpointGroupRequest', + 'GetNetworkFirewallPolicyRequest', + 'GetNetworkRequest', + 'GetNodeGroupRequest', + 'GetNodeTemplateRequest', + 'GetNodeTypeRequest', + 'GetPacketMirroringRequest', + 'GetProjectRequest', + 'GetPublicAdvertisedPrefixeRequest', + 'GetPublicDelegatedPrefixeRequest', + 'GetRegionAutoscalerRequest', + 'GetRegionBackendServiceRequest', + 'GetRegionCommitmentRequest', + 'GetRegionDiskRequest', + 'GetRegionDiskTypeRequest', + 'GetRegionHealthCheckRequest', + 'GetRegionHealthCheckServiceRequest', + 'GetRegionInstanceGroupManagerRequest', + 'GetRegionInstanceGroupRequest', + 'GetRegionInstanceTemplateRequest', + 'GetRegionNetworkEndpointGroupRequest', + 'GetRegionNetworkFirewallPolicyRequest', + 'GetRegionNotificationEndpointRequest', + 'GetRegionOperationRequest', + 'GetRegionRequest', + 'GetRegionSecurityPolicyRequest', + 'GetRegionSslCertificateRequest', + 'GetRegionSslPolicyRequest', + 'GetRegionTargetHttpProxyRequest', + 'GetRegionTargetHttpsProxyRequest', + 
'GetRegionTargetTcpProxyRequest', + 'GetRegionUrlMapRequest', + 'GetReservationRequest', + 'GetResourcePolicyRequest', + 'GetRouteRequest', + 'GetRouterRequest', + 'GetRouterStatusRouterRequest', + 'GetRuleFirewallPolicyRequest', + 'GetRuleNetworkFirewallPolicyRequest', + 'GetRuleRegionNetworkFirewallPolicyRequest', + 'GetRuleSecurityPolicyRequest', + 'GetScreenshotInstanceRequest', + 'GetSecurityPolicyRequest', + 'GetSerialPortOutputInstanceRequest', + 'GetServiceAttachmentRequest', + 'GetShieldedInstanceIdentityInstanceRequest', + 'GetSnapshotRequest', + 'GetSslCertificateRequest', + 'GetSslPolicyRequest', + 'GetStatusVpnGatewayRequest', + 'GetSubnetworkRequest', + 'GetTargetGrpcProxyRequest', + 'GetTargetHttpProxyRequest', + 'GetTargetHttpsProxyRequest', + 'GetTargetInstanceRequest', + 'GetTargetPoolRequest', + 'GetTargetSslProxyRequest', + 'GetTargetTcpProxyRequest', + 'GetTargetVpnGatewayRequest', + 'GetUrlMapRequest', + 'GetVpnGatewayRequest', + 'GetVpnTunnelRequest', + 'GetXpnHostProjectRequest', + 'GetXpnResourcesProjectsRequest', + 'GetZoneOperationRequest', + 'GetZoneRequest', + 'GlobalAddressesMoveRequest', + 'GlobalNetworkEndpointGroupsAttachEndpointsRequest', + 'GlobalNetworkEndpointGroupsDetachEndpointsRequest', + 'GlobalOrganizationSetPolicyRequest', + 'GlobalSetLabelsRequest', + 'GlobalSetPolicyRequest', + 'GRPCHealthCheck', + 'GuestAttributes', + 'GuestAttributesEntry', + 'GuestAttributesValue', + 'GuestOsFeature', + 'HealthCheck', + 'HealthCheckList', + 'HealthCheckLogConfig', + 'HealthCheckReference', + 'HealthChecksAggregatedList', + 'HealthCheckService', + 'HealthCheckServiceReference', + 'HealthCheckServicesList', + 'HealthChecksScopedList', + 'HealthStatus', + 'HealthStatusForNetworkEndpoint', + 'Help', + 'HelpLink', + 'HostRule', + 'HTTP2HealthCheck', + 'HttpFaultAbort', + 'HttpFaultDelay', + 'HttpFaultInjection', + 'HttpHeaderAction', + 'HttpHeaderMatch', + 'HttpHeaderOption', + 'HTTPHealthCheck', + 'HttpQueryParameterMatch', + 
'HttpRedirectAction', + 'HttpRetryPolicy', + 'HttpRouteAction', + 'HttpRouteRule', + 'HttpRouteRuleMatch', + 'HTTPSHealthCheck', + 'Image', + 'ImageFamilyView', + 'ImageList', + 'InitialStateConfig', + 'InsertAddressRequest', + 'InsertAutoscalerRequest', + 'InsertBackendBucketRequest', + 'InsertBackendServiceRequest', + 'InsertDiskRequest', + 'InsertExternalVpnGatewayRequest', + 'InsertFirewallPolicyRequest', + 'InsertFirewallRequest', + 'InsertForwardingRuleRequest', + 'InsertGlobalAddressRequest', + 'InsertGlobalForwardingRuleRequest', + 'InsertGlobalNetworkEndpointGroupRequest', + 'InsertGlobalPublicDelegatedPrefixeRequest', + 'InsertHealthCheckRequest', + 'InsertImageRequest', + 'InsertInstanceGroupManagerRequest', + 'InsertInstanceGroupRequest', + 'InsertInstanceRequest', + 'InsertInstanceTemplateRequest', + 'InsertInterconnectAttachmentRequest', + 'InsertInterconnectRequest', + 'InsertLicenseRequest', + 'InsertMachineImageRequest', + 'InsertNetworkAttachmentRequest', + 'InsertNetworkEdgeSecurityServiceRequest', + 'InsertNetworkEndpointGroupRequest', + 'InsertNetworkFirewallPolicyRequest', + 'InsertNetworkRequest', + 'InsertNodeGroupRequest', + 'InsertNodeTemplateRequest', + 'InsertPacketMirroringRequest', + 'InsertPublicAdvertisedPrefixeRequest', + 'InsertPublicDelegatedPrefixeRequest', + 'InsertRegionAutoscalerRequest', + 'InsertRegionBackendServiceRequest', + 'InsertRegionCommitmentRequest', + 'InsertRegionDiskRequest', + 'InsertRegionHealthCheckRequest', + 'InsertRegionHealthCheckServiceRequest', + 'InsertRegionInstanceGroupManagerRequest', + 'InsertRegionInstanceTemplateRequest', + 'InsertRegionNetworkEndpointGroupRequest', + 'InsertRegionNetworkFirewallPolicyRequest', + 'InsertRegionNotificationEndpointRequest', + 'InsertRegionSecurityPolicyRequest', + 'InsertRegionSslCertificateRequest', + 'InsertRegionSslPolicyRequest', + 'InsertRegionTargetHttpProxyRequest', + 'InsertRegionTargetHttpsProxyRequest', + 'InsertRegionTargetTcpProxyRequest', + 
'InsertRegionUrlMapRequest', + 'InsertReservationRequest', + 'InsertResourcePolicyRequest', + 'InsertRouteRequest', + 'InsertRouterRequest', + 'InsertSecurityPolicyRequest', + 'InsertServiceAttachmentRequest', + 'InsertSnapshotRequest', + 'InsertSslCertificateRequest', + 'InsertSslPolicyRequest', + 'InsertSubnetworkRequest', + 'InsertTargetGrpcProxyRequest', + 'InsertTargetHttpProxyRequest', + 'InsertTargetHttpsProxyRequest', + 'InsertTargetInstanceRequest', + 'InsertTargetPoolRequest', + 'InsertTargetSslProxyRequest', + 'InsertTargetTcpProxyRequest', + 'InsertTargetVpnGatewayRequest', + 'InsertUrlMapRequest', + 'InsertVpnGatewayRequest', + 'InsertVpnTunnelRequest', + 'Instance', + 'InstanceAggregatedList', + 'InstanceConsumptionData', + 'InstanceConsumptionInfo', + 'InstanceGroup', + 'InstanceGroupAggregatedList', + 'InstanceGroupList', + 'InstanceGroupManager', + 'InstanceGroupManagerActionsSummary', + 'InstanceGroupManagerAggregatedList', + 'InstanceGroupManagerAutoHealingPolicy', + 'InstanceGroupManagerInstanceLifecyclePolicy', + 'InstanceGroupManagerList', + 'InstanceGroupManagersAbandonInstancesRequest', + 'InstanceGroupManagersApplyUpdatesRequest', + 'InstanceGroupManagersCreateInstancesRequest', + 'InstanceGroupManagersDeleteInstancesRequest', + 'InstanceGroupManagersDeletePerInstanceConfigsReq', + 'InstanceGroupManagersListErrorsResponse', + 'InstanceGroupManagersListManagedInstancesResponse', + 'InstanceGroupManagersListPerInstanceConfigsResp', + 'InstanceGroupManagersPatchPerInstanceConfigsReq', + 'InstanceGroupManagersRecreateInstancesRequest', + 'InstanceGroupManagersScopedList', + 'InstanceGroupManagersSetInstanceTemplateRequest', + 'InstanceGroupManagersSetTargetPoolsRequest', + 'InstanceGroupManagerStatus', + 'InstanceGroupManagerStatusStateful', + 'InstanceGroupManagerStatusStatefulPerInstanceConfigs', + 'InstanceGroupManagerStatusVersionTarget', + 'InstanceGroupManagersUpdatePerInstanceConfigsReq', + 'InstanceGroupManagerUpdatePolicy', + 
'InstanceGroupManagerVersion', + 'InstanceGroupsAddInstancesRequest', + 'InstanceGroupsListInstances', + 'InstanceGroupsListInstancesRequest', + 'InstanceGroupsRemoveInstancesRequest', + 'InstanceGroupsScopedList', + 'InstanceGroupsSetNamedPortsRequest', + 'InstanceList', + 'InstanceListReferrers', + 'InstanceManagedByIgmError', + 'InstanceManagedByIgmErrorInstanceActionDetails', + 'InstanceManagedByIgmErrorManagedInstanceError', + 'InstanceMoveRequest', + 'InstanceParams', + 'InstanceProperties', + 'InstanceReference', + 'InstancesAddResourcePoliciesRequest', + 'InstancesGetEffectiveFirewallsResponse', + 'InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'InstancesRemoveResourcePoliciesRequest', + 'InstancesScopedList', + 'InstancesSetLabelsRequest', + 'InstancesSetMachineResourcesRequest', + 'InstancesSetMachineTypeRequest', + 'InstancesSetMinCpuPlatformRequest', + 'InstancesSetNameRequest', + 'InstancesSetServiceAccountRequest', + 'InstancesStartWithEncryptionKeyRequest', + 'InstanceTemplate', + 'InstanceTemplateAggregatedList', + 'InstanceTemplateList', + 'InstanceTemplatesScopedList', + 'InstanceWithNamedPorts', + 'Int64RangeMatch', + 'Interconnect', + 'InterconnectAttachment', + 'InterconnectAttachmentAggregatedList', + 'InterconnectAttachmentConfigurationConstraints', + 'InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange', + 'InterconnectAttachmentList', + 'InterconnectAttachmentPartnerMetadata', + 'InterconnectAttachmentPrivateInfo', + 'InterconnectAttachmentsScopedList', + 'InterconnectCircuitInfo', + 'InterconnectDiagnostics', + 'InterconnectDiagnosticsARPEntry', + 'InterconnectDiagnosticsLinkLACPStatus', + 'InterconnectDiagnosticsLinkOpticalPower', + 'InterconnectDiagnosticsLinkStatus', + 'InterconnectList', + 'InterconnectLocation', + 'InterconnectLocationList', + 'InterconnectLocationRegionInfo', + 'InterconnectOutageNotification', + 'InterconnectRemoteLocation', + 'InterconnectRemoteLocationConstraints', + 
'InterconnectRemoteLocationConstraintsSubnetLengthRange', + 'InterconnectRemoteLocationList', + 'InterconnectRemoteLocationPermittedConnections', + 'InterconnectsGetDiagnosticsResponse', + 'InvalidateCacheUrlMapRequest', + 'Items', + 'License', + 'LicenseCode', + 'LicenseCodeLicenseAlias', + 'LicenseResourceCommitment', + 'LicenseResourceRequirements', + 'LicensesListResponse', + 'ListAcceleratorTypesRequest', + 'ListAddressesRequest', + 'ListAssociationsFirewallPolicyRequest', + 'ListAutoscalersRequest', + 'ListAvailableFeaturesRegionSslPoliciesRequest', + 'ListAvailableFeaturesSslPoliciesRequest', + 'ListBackendBucketsRequest', + 'ListBackendServicesRequest', + 'ListDisksRequest', + 'ListDiskTypesRequest', + 'ListErrorsInstanceGroupManagersRequest', + 'ListErrorsRegionInstanceGroupManagersRequest', + 'ListExternalVpnGatewaysRequest', + 'ListFirewallPoliciesRequest', + 'ListFirewallsRequest', + 'ListForwardingRulesRequest', + 'ListGlobalAddressesRequest', + 'ListGlobalForwardingRulesRequest', + 'ListGlobalNetworkEndpointGroupsRequest', + 'ListGlobalOperationsRequest', + 'ListGlobalOrganizationOperationsRequest', + 'ListGlobalPublicDelegatedPrefixesRequest', + 'ListHealthChecksRequest', + 'ListImagesRequest', + 'ListInstanceGroupManagersRequest', + 'ListInstanceGroupsRequest', + 'ListInstancesInstanceGroupsRequest', + 'ListInstancesRegionInstanceGroupsRequest', + 'ListInstancesRequest', + 'ListInstanceTemplatesRequest', + 'ListInterconnectAttachmentsRequest', + 'ListInterconnectLocationsRequest', + 'ListInterconnectRemoteLocationsRequest', + 'ListInterconnectsRequest', + 'ListLicensesRequest', + 'ListMachineImagesRequest', + 'ListMachineTypesRequest', + 'ListManagedInstancesInstanceGroupManagersRequest', + 'ListManagedInstancesRegionInstanceGroupManagersRequest', + 'ListNetworkAttachmentsRequest', + 'ListNetworkEndpointGroupsRequest', + 'ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest', + 'ListNetworkEndpointsNetworkEndpointGroupsRequest', + 
'ListNetworkFirewallPoliciesRequest', + 'ListNetworksRequest', + 'ListNodeGroupsRequest', + 'ListNodesNodeGroupsRequest', + 'ListNodeTemplatesRequest', + 'ListNodeTypesRequest', + 'ListPacketMirroringsRequest', + 'ListPeeringRoutesNetworksRequest', + 'ListPerInstanceConfigsInstanceGroupManagersRequest', + 'ListPerInstanceConfigsRegionInstanceGroupManagersRequest', + 'ListPreconfiguredExpressionSetsSecurityPoliciesRequest', + 'ListPublicAdvertisedPrefixesRequest', + 'ListPublicDelegatedPrefixesRequest', + 'ListReferrersInstancesRequest', + 'ListRegionAutoscalersRequest', + 'ListRegionBackendServicesRequest', + 'ListRegionCommitmentsRequest', + 'ListRegionDisksRequest', + 'ListRegionDiskTypesRequest', + 'ListRegionHealthCheckServicesRequest', + 'ListRegionHealthChecksRequest', + 'ListRegionInstanceGroupManagersRequest', + 'ListRegionInstanceGroupsRequest', + 'ListRegionInstanceTemplatesRequest', + 'ListRegionNetworkEndpointGroupsRequest', + 'ListRegionNetworkFirewallPoliciesRequest', + 'ListRegionNotificationEndpointsRequest', + 'ListRegionOperationsRequest', + 'ListRegionSecurityPoliciesRequest', + 'ListRegionsRequest', + 'ListRegionSslCertificatesRequest', + 'ListRegionSslPoliciesRequest', + 'ListRegionTargetHttpProxiesRequest', + 'ListRegionTargetHttpsProxiesRequest', + 'ListRegionTargetTcpProxiesRequest', + 'ListRegionUrlMapsRequest', + 'ListReservationsRequest', + 'ListResourcePoliciesRequest', + 'ListRoutersRequest', + 'ListRoutesRequest', + 'ListSecurityPoliciesRequest', + 'ListServiceAttachmentsRequest', + 'ListSnapshotsRequest', + 'ListSslCertificatesRequest', + 'ListSslPoliciesRequest', + 'ListSubnetworksRequest', + 'ListTargetGrpcProxiesRequest', + 'ListTargetHttpProxiesRequest', + 'ListTargetHttpsProxiesRequest', + 'ListTargetInstancesRequest', + 'ListTargetPoolsRequest', + 'ListTargetSslProxiesRequest', + 'ListTargetTcpProxiesRequest', + 'ListTargetVpnGatewaysRequest', + 'ListUrlMapsRequest', + 'ListUsableSubnetworksRequest', + 'ListVpnGatewaysRequest', 
+ 'ListVpnTunnelsRequest', + 'ListXpnHostsProjectsRequest', + 'ListZoneOperationsRequest', + 'ListZonesRequest', + 'LocalDisk', + 'LocalizedMessage', + 'LocationPolicy', + 'LocationPolicyLocation', + 'LocationPolicyLocationConstraints', + 'LogConfig', + 'LogConfigCloudAuditOptions', + 'LogConfigCounterOptions', + 'LogConfigCounterOptionsCustomField', + 'LogConfigDataAccessOptions', + 'MachineImage', + 'MachineImageList', + 'MachineType', + 'MachineTypeAggregatedList', + 'MachineTypeList', + 'MachineTypesScopedList', + 'ManagedInstance', + 'ManagedInstanceInstanceHealth', + 'ManagedInstanceLastAttempt', + 'ManagedInstanceVersion', + 'Metadata', + 'MetadataFilter', + 'MetadataFilterLabelMatch', + 'MoveAddressRequest', + 'MoveDiskProjectRequest', + 'MoveFirewallPolicyRequest', + 'MoveGlobalAddressRequest', + 'MoveInstanceProjectRequest', + 'NamedPort', + 'Network', + 'NetworkAttachment', + 'NetworkAttachmentAggregatedList', + 'NetworkAttachmentConnectedEndpoint', + 'NetworkAttachmentList', + 'NetworkAttachmentsScopedList', + 'NetworkEdgeSecurityService', + 'NetworkEdgeSecurityServiceAggregatedList', + 'NetworkEdgeSecurityServicesScopedList', + 'NetworkEndpoint', + 'NetworkEndpointGroup', + 'NetworkEndpointGroupAggregatedList', + 'NetworkEndpointGroupAppEngine', + 'NetworkEndpointGroupCloudFunction', + 'NetworkEndpointGroupCloudRun', + 'NetworkEndpointGroupList', + 'NetworkEndpointGroupPscData', + 'NetworkEndpointGroupsAttachEndpointsRequest', + 'NetworkEndpointGroupsDetachEndpointsRequest', + 'NetworkEndpointGroupsListEndpointsRequest', + 'NetworkEndpointGroupsListNetworkEndpoints', + 'NetworkEndpointGroupsScopedList', + 'NetworkEndpointWithHealthStatus', + 'NetworkInterface', + 'NetworkList', + 'NetworkPeering', + 'NetworkPerformanceConfig', + 'NetworkRoutingConfig', + 'NetworksAddPeeringRequest', + 'NetworksGetEffectiveFirewallsResponse', + 'NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'NetworksRemovePeeringRequest', + 
'NetworksUpdatePeeringRequest', + 'NodeGroup', + 'NodeGroupAggregatedList', + 'NodeGroupAutoscalingPolicy', + 'NodeGroupList', + 'NodeGroupMaintenanceWindow', + 'NodeGroupNode', + 'NodeGroupsAddNodesRequest', + 'NodeGroupsDeleteNodesRequest', + 'NodeGroupsListNodes', + 'NodeGroupsScopedList', + 'NodeGroupsSetNodeTemplateRequest', + 'NodeGroupsSimulateMaintenanceEventRequest', + 'NodeTemplate', + 'NodeTemplateAggregatedList', + 'NodeTemplateList', + 'NodeTemplateNodeTypeFlexibility', + 'NodeTemplatesScopedList', + 'NodeType', + 'NodeTypeAggregatedList', + 'NodeTypeList', + 'NodeTypesScopedList', + 'NotificationEndpoint', + 'NotificationEndpointGrpcSettings', + 'NotificationEndpointList', + 'Operation', + 'OperationAggregatedList', + 'OperationList', + 'OperationsScopedList', + 'OutlierDetection', + 'PacketIntervals', + 'PacketMirroring', + 'PacketMirroringAggregatedList', + 'PacketMirroringFilter', + 'PacketMirroringForwardingRuleInfo', + 'PacketMirroringList', + 'PacketMirroringMirroredResourceInfo', + 'PacketMirroringMirroredResourceInfoInstanceInfo', + 'PacketMirroringMirroredResourceInfoSubnetInfo', + 'PacketMirroringNetworkInfo', + 'PacketMirroringsScopedList', + 'PatchAutoscalerRequest', + 'PatchBackendBucketRequest', + 'PatchBackendServiceRequest', + 'PatchFirewallPolicyRequest', + 'PatchFirewallRequest', + 'PatchForwardingRuleRequest', + 'PatchGlobalForwardingRuleRequest', + 'PatchGlobalPublicDelegatedPrefixeRequest', + 'PatchHealthCheckRequest', + 'PatchImageRequest', + 'PatchInstanceGroupManagerRequest', + 'PatchInterconnectAttachmentRequest', + 'PatchInterconnectRequest', + 'PatchNetworkEdgeSecurityServiceRequest', + 'PatchNetworkFirewallPolicyRequest', + 'PatchNetworkRequest', + 'PatchNodeGroupRequest', + 'PatchPacketMirroringRequest', + 'PatchPerInstanceConfigsInstanceGroupManagerRequest', + 'PatchPerInstanceConfigsRegionInstanceGroupManagerRequest', + 'PatchPublicAdvertisedPrefixeRequest', + 'PatchPublicDelegatedPrefixeRequest', + 
'PatchRegionAutoscalerRequest', + 'PatchRegionBackendServiceRequest', + 'PatchRegionHealthCheckRequest', + 'PatchRegionHealthCheckServiceRequest', + 'PatchRegionInstanceGroupManagerRequest', + 'PatchRegionNetworkFirewallPolicyRequest', + 'PatchRegionSecurityPolicyRequest', + 'PatchRegionSslPolicyRequest', + 'PatchRegionTargetHttpsProxyRequest', + 'PatchRegionUrlMapRequest', + 'PatchResourcePolicyRequest', + 'PatchRouterRequest', + 'PatchRuleFirewallPolicyRequest', + 'PatchRuleNetworkFirewallPolicyRequest', + 'PatchRuleRegionNetworkFirewallPolicyRequest', + 'PatchRuleSecurityPolicyRequest', + 'PatchSecurityPolicyRequest', + 'PatchServiceAttachmentRequest', + 'PatchSslPolicyRequest', + 'PatchSubnetworkRequest', + 'PatchTargetGrpcProxyRequest', + 'PatchTargetHttpProxyRequest', + 'PatchTargetHttpsProxyRequest', + 'PatchUrlMapRequest', + 'PathMatcher', + 'PathRule', + 'PerInstanceConfig', + 'Policy', + 'PreconfiguredWafSet', + 'PreservedState', + 'PreservedStatePreservedDisk', + 'PreviewRouterRequest', + 'Project', + 'ProjectsDisableXpnResourceRequest', + 'ProjectsEnableXpnResourceRequest', + 'ProjectsGetXpnResources', + 'ProjectsListXpnHostsRequest', + 'ProjectsSetDefaultNetworkTierRequest', + 'PublicAdvertisedPrefix', + 'PublicAdvertisedPrefixList', + 'PublicAdvertisedPrefixPublicDelegatedPrefix', + 'PublicDelegatedPrefix', + 'PublicDelegatedPrefixAggregatedList', + 'PublicDelegatedPrefixesScopedList', + 'PublicDelegatedPrefixList', + 'PublicDelegatedPrefixPublicDelegatedSubPrefix', + 'Quota', + 'QuotaExceededInfo', + 'RawDisk', + 'RecreateInstancesInstanceGroupManagerRequest', + 'RecreateInstancesRegionInstanceGroupManagerRequest', + 'Reference', + 'Region', + 'RegionAddressesMoveRequest', + 'RegionAutoscalerList', + 'RegionDisksAddResourcePoliciesRequest', + 'RegionDisksRemoveResourcePoliciesRequest', + 'RegionDisksResizeRequest', + 'RegionDisksStartAsyncReplicationRequest', + 'RegionDiskTypeList', + 'RegionInstanceGroupList', + 
'RegionInstanceGroupManagerDeleteInstanceConfigReq', + 'RegionInstanceGroupManagerList', + 'RegionInstanceGroupManagerPatchInstanceConfigReq', + 'RegionInstanceGroupManagersAbandonInstancesRequest', + 'RegionInstanceGroupManagersApplyUpdatesRequest', + 'RegionInstanceGroupManagersCreateInstancesRequest', + 'RegionInstanceGroupManagersDeleteInstancesRequest', + 'RegionInstanceGroupManagersListErrorsResponse', + 'RegionInstanceGroupManagersListInstanceConfigsResp', + 'RegionInstanceGroupManagersListInstancesResponse', + 'RegionInstanceGroupManagersRecreateRequest', + 'RegionInstanceGroupManagersSetTargetPoolsRequest', + 'RegionInstanceGroupManagersSetTemplateRequest', + 'RegionInstanceGroupManagerUpdateInstanceConfigReq', + 'RegionInstanceGroupsListInstances', + 'RegionInstanceGroupsListInstancesRequest', + 'RegionInstanceGroupsSetNamedPortsRequest', + 'RegionList', + 'RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse', + 'RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'RegionSetLabelsRequest', + 'RegionSetPolicyRequest', + 'RegionTargetHttpsProxiesSetSslCertificatesRequest', + 'RegionUrlMapsValidateRequest', + 'RemoveAssociationFirewallPolicyRequest', + 'RemoveAssociationNetworkFirewallPolicyRequest', + 'RemoveAssociationRegionNetworkFirewallPolicyRequest', + 'RemoveHealthCheckTargetPoolRequest', + 'RemoveInstancesInstanceGroupRequest', + 'RemoveInstanceTargetPoolRequest', + 'RemovePeeringNetworkRequest', + 'RemoveResourcePoliciesDiskRequest', + 'RemoveResourcePoliciesInstanceRequest', + 'RemoveResourcePoliciesRegionDiskRequest', + 'RemoveRuleFirewallPolicyRequest', + 'RemoveRuleNetworkFirewallPolicyRequest', + 'RemoveRuleRegionNetworkFirewallPolicyRequest', + 'RemoveRuleSecurityPolicyRequest', + 'RequestMirrorPolicy', + 'Reservation', + 'ReservationAffinity', + 'ReservationAggregatedList', + 'ReservationList', + 'ReservationsResizeRequest', + 'ReservationsScopedList', + 'ResetInstanceRequest', + 'ResizeDiskRequest', + 
'ResizeInstanceGroupManagerRequest', + 'ResizeRegionDiskRequest', + 'ResizeRegionInstanceGroupManagerRequest', + 'ResizeReservationRequest', + 'ResourceCommitment', + 'ResourceGroupReference', + 'ResourcePoliciesScopedList', + 'ResourcePolicy', + 'ResourcePolicyAggregatedList', + 'ResourcePolicyDailyCycle', + 'ResourcePolicyDiskConsistencyGroupPolicy', + 'ResourcePolicyGroupPlacementPolicy', + 'ResourcePolicyHourlyCycle', + 'ResourcePolicyInstanceSchedulePolicy', + 'ResourcePolicyInstanceSchedulePolicySchedule', + 'ResourcePolicyList', + 'ResourcePolicyResourceStatus', + 'ResourcePolicyResourceStatusInstanceSchedulePolicyStatus', + 'ResourcePolicySnapshotSchedulePolicy', + 'ResourcePolicySnapshotSchedulePolicyRetentionPolicy', + 'ResourcePolicySnapshotSchedulePolicySchedule', + 'ResourcePolicySnapshotSchedulePolicySnapshotProperties', + 'ResourcePolicyWeeklyCycle', + 'ResourcePolicyWeeklyCycleDayOfWeek', + 'ResourceStatus', + 'ResumeInstanceRequest', + 'Route', + 'RouteAsPath', + 'RouteList', + 'Router', + 'RouterAdvertisedIpRange', + 'RouterAggregatedList', + 'RouterBgp', + 'RouterBgpPeer', + 'RouterBgpPeerBfd', + 'RouterBgpPeerCustomLearnedIpRange', + 'RouterInterface', + 'RouterList', + 'RouterMd5AuthenticationKey', + 'RouterNat', + 'RouterNatLogConfig', + 'RouterNatRule', + 'RouterNatRuleAction', + 'RouterNatSubnetworkToNat', + 'RoutersPreviewResponse', + 'RoutersScopedList', + 'RouterStatus', + 'RouterStatusBgpPeerStatus', + 'RouterStatusNatStatus', + 'RouterStatusNatStatusNatRuleStatus', + 'RouterStatusResponse', + 'Rule', + 'SavedAttachedDisk', + 'SavedDisk', + 'ScalingScheduleStatus', + 'Scheduling', + 'SchedulingNodeAffinity', + 'ScratchDisks', + 'Screenshot', + 'SecurityPoliciesAggregatedList', + 'SecurityPoliciesListPreconfiguredExpressionSetsResponse', + 'SecurityPoliciesScopedList', + 'SecurityPoliciesWafConfig', + 'SecurityPolicy', + 'SecurityPolicyAdaptiveProtectionConfig', + 'SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig', + 
'SecurityPolicyAdvancedOptionsConfig', + 'SecurityPolicyAdvancedOptionsConfigJsonCustomConfig', + 'SecurityPolicyDdosProtectionConfig', + 'SecurityPolicyList', + 'SecurityPolicyRecaptchaOptionsConfig', + 'SecurityPolicyReference', + 'SecurityPolicyRule', + 'SecurityPolicyRuleHttpHeaderAction', + 'SecurityPolicyRuleHttpHeaderActionHttpHeaderOption', + 'SecurityPolicyRuleMatcher', + 'SecurityPolicyRuleMatcherConfig', + 'SecurityPolicyRulePreconfiguredWafConfig', + 'SecurityPolicyRulePreconfiguredWafConfigExclusion', + 'SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams', + 'SecurityPolicyRuleRateLimitOptions', + 'SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig', + 'SecurityPolicyRuleRateLimitOptionsThreshold', + 'SecurityPolicyRuleRedirectOptions', + 'SecuritySettings', + 'SendDiagnosticInterruptInstanceRequest', + 'SendDiagnosticInterruptInstanceResponse', + 'SerialPortOutput', + 'ServerBinding', + 'ServiceAccount', + 'ServiceAttachment', + 'ServiceAttachmentAggregatedList', + 'ServiceAttachmentConnectedEndpoint', + 'ServiceAttachmentConsumerProjectLimit', + 'ServiceAttachmentList', + 'ServiceAttachmentsScopedList', + 'SetBackendServiceTargetSslProxyRequest', + 'SetBackendServiceTargetTcpProxyRequest', + 'SetBackupTargetPoolRequest', + 'SetCertificateMapTargetHttpsProxyRequest', + 'SetCertificateMapTargetSslProxyRequest', + 'SetCommonInstanceMetadataProjectRequest', + 'SetDefaultNetworkTierProjectRequest', + 'SetDeletionProtectionInstanceRequest', + 'SetDiskAutoDeleteInstanceRequest', + 'SetEdgeSecurityPolicyBackendBucketRequest', + 'SetEdgeSecurityPolicyBackendServiceRequest', + 'SetIamPolicyBackendServiceRequest', + 'SetIamPolicyDiskRequest', + 'SetIamPolicyFirewallPolicyRequest', + 'SetIamPolicyImageRequest', + 'SetIamPolicyInstanceRequest', + 'SetIamPolicyInstanceTemplateRequest', + 'SetIamPolicyLicenseRequest', + 'SetIamPolicyMachineImageRequest', + 'SetIamPolicyNetworkAttachmentRequest', + 'SetIamPolicyNetworkFirewallPolicyRequest', + 
'SetIamPolicyNodeGroupRequest', + 'SetIamPolicyNodeTemplateRequest', + 'SetIamPolicyRegionBackendServiceRequest', + 'SetIamPolicyRegionDiskRequest', + 'SetIamPolicyRegionNetworkFirewallPolicyRequest', + 'SetIamPolicyReservationRequest', + 'SetIamPolicyResourcePolicyRequest', + 'SetIamPolicyServiceAttachmentRequest', + 'SetIamPolicySnapshotRequest', + 'SetIamPolicySubnetworkRequest', + 'SetInstanceTemplateInstanceGroupManagerRequest', + 'SetInstanceTemplateRegionInstanceGroupManagerRequest', + 'SetLabelsAddressRequest', + 'SetLabelsDiskRequest', + 'SetLabelsExternalVpnGatewayRequest', + 'SetLabelsForwardingRuleRequest', + 'SetLabelsGlobalAddressRequest', + 'SetLabelsGlobalForwardingRuleRequest', + 'SetLabelsImageRequest', + 'SetLabelsInstanceRequest', + 'SetLabelsInterconnectAttachmentRequest', + 'SetLabelsInterconnectRequest', + 'SetLabelsRegionDiskRequest', + 'SetLabelsSecurityPolicyRequest', + 'SetLabelsSnapshotRequest', + 'SetLabelsTargetVpnGatewayRequest', + 'SetLabelsVpnGatewayRequest', + 'SetLabelsVpnTunnelRequest', + 'SetMachineResourcesInstanceRequest', + 'SetMachineTypeInstanceRequest', + 'SetMetadataInstanceRequest', + 'SetMinCpuPlatformInstanceRequest', + 'SetNamedPortsInstanceGroupRequest', + 'SetNamedPortsRegionInstanceGroupRequest', + 'SetNameInstanceRequest', + 'SetNodeTemplateNodeGroupRequest', + 'SetPrivateIpGoogleAccessSubnetworkRequest', + 'SetProxyHeaderTargetSslProxyRequest', + 'SetProxyHeaderTargetTcpProxyRequest', + 'SetQuicOverrideTargetHttpsProxyRequest', + 'SetSchedulingInstanceRequest', + 'SetSecurityPolicyBackendServiceRequest', + 'SetServiceAccountInstanceRequest', + 'SetShieldedInstanceIntegrityPolicyInstanceRequest', + 'SetSslCertificatesRegionTargetHttpsProxyRequest', + 'SetSslCertificatesTargetHttpsProxyRequest', + 'SetSslCertificatesTargetSslProxyRequest', + 'SetSslPolicyTargetHttpsProxyRequest', + 'SetSslPolicyTargetSslProxyRequest', + 'SetTagsInstanceRequest', + 'SetTargetForwardingRuleRequest', + 
'SetTargetGlobalForwardingRuleRequest', + 'SetTargetPoolsInstanceGroupManagerRequest', + 'SetTargetPoolsRegionInstanceGroupManagerRequest', + 'SetUrlMapRegionTargetHttpProxyRequest', + 'SetUrlMapRegionTargetHttpsProxyRequest', + 'SetUrlMapTargetHttpProxyRequest', + 'SetUrlMapTargetHttpsProxyRequest', + 'SetUsageExportBucketProjectRequest', + 'ShareSettings', + 'ShareSettingsProjectConfig', + 'ShieldedInstanceConfig', + 'ShieldedInstanceIdentity', + 'ShieldedInstanceIdentityEntry', + 'ShieldedInstanceIntegrityPolicy', + 'SignedUrlKey', + 'SimulateMaintenanceEventInstanceRequest', + 'SimulateMaintenanceEventNodeGroupRequest', + 'Snapshot', + 'SnapshotList', + 'SourceDiskEncryptionKey', + 'SourceInstanceParams', + 'SourceInstanceProperties', + 'SslCertificate', + 'SslCertificateAggregatedList', + 'SslCertificateList', + 'SslCertificateManagedSslCertificate', + 'SslCertificateSelfManagedSslCertificate', + 'SslCertificatesScopedList', + 'SSLHealthCheck', + 'SslPoliciesAggregatedList', + 'SslPoliciesList', + 'SslPoliciesListAvailableFeaturesResponse', + 'SslPoliciesScopedList', + 'SslPolicy', + 'SslPolicyReference', + 'StartAsyncReplicationDiskRequest', + 'StartAsyncReplicationRegionDiskRequest', + 'StartInstanceRequest', + 'StartWithEncryptionKeyInstanceRequest', + 'StatefulPolicy', + 'StatefulPolicyPreservedState', + 'StatefulPolicyPreservedStateDiskDevice', + 'StopAsyncReplicationDiskRequest', + 'StopAsyncReplicationRegionDiskRequest', + 'StopGroupAsyncReplicationDiskRequest', + 'StopGroupAsyncReplicationRegionDiskRequest', + 'StopInstanceRequest', + 'Subnetwork', + 'SubnetworkAggregatedList', + 'SubnetworkList', + 'SubnetworkLogConfig', + 'SubnetworkSecondaryRange', + 'SubnetworksExpandIpCidrRangeRequest', + 'SubnetworksScopedList', + 'SubnetworksSetPrivateIpGoogleAccessRequest', + 'Subsetting', + 'SuspendInstanceRequest', + 'SwitchToCustomModeNetworkRequest', + 'Tags', + 'TargetGrpcProxy', + 'TargetGrpcProxyList', + 'TargetHttpProxiesScopedList', + 
'TargetHttpProxy', + 'TargetHttpProxyAggregatedList', + 'TargetHttpProxyList', + 'TargetHttpsProxiesScopedList', + 'TargetHttpsProxiesSetCertificateMapRequest', + 'TargetHttpsProxiesSetQuicOverrideRequest', + 'TargetHttpsProxiesSetSslCertificatesRequest', + 'TargetHttpsProxy', + 'TargetHttpsProxyAggregatedList', + 'TargetHttpsProxyList', + 'TargetInstance', + 'TargetInstanceAggregatedList', + 'TargetInstanceList', + 'TargetInstancesScopedList', + 'TargetPool', + 'TargetPoolAggregatedList', + 'TargetPoolInstanceHealth', + 'TargetPoolList', + 'TargetPoolsAddHealthCheckRequest', + 'TargetPoolsAddInstanceRequest', + 'TargetPoolsRemoveHealthCheckRequest', + 'TargetPoolsRemoveInstanceRequest', + 'TargetPoolsScopedList', + 'TargetReference', + 'TargetSslProxiesSetBackendServiceRequest', + 'TargetSslProxiesSetCertificateMapRequest', + 'TargetSslProxiesSetProxyHeaderRequest', + 'TargetSslProxiesSetSslCertificatesRequest', + 'TargetSslProxy', + 'TargetSslProxyList', + 'TargetTcpProxiesScopedList', + 'TargetTcpProxiesSetBackendServiceRequest', + 'TargetTcpProxiesSetProxyHeaderRequest', + 'TargetTcpProxy', + 'TargetTcpProxyAggregatedList', + 'TargetTcpProxyList', + 'TargetVpnGateway', + 'TargetVpnGatewayAggregatedList', + 'TargetVpnGatewayList', + 'TargetVpnGatewaysScopedList', + 'TCPHealthCheck', + 'TestFailure', + 'TestIamPermissionsDiskRequest', + 'TestIamPermissionsExternalVpnGatewayRequest', + 'TestIamPermissionsFirewallPolicyRequest', + 'TestIamPermissionsImageRequest', + 'TestIamPermissionsInstanceRequest', + 'TestIamPermissionsInstanceTemplateRequest', + 'TestIamPermissionsLicenseCodeRequest', + 'TestIamPermissionsLicenseRequest', + 'TestIamPermissionsMachineImageRequest', + 'TestIamPermissionsNetworkAttachmentRequest', + 'TestIamPermissionsNetworkEndpointGroupRequest', + 'TestIamPermissionsNetworkFirewallPolicyRequest', + 'TestIamPermissionsNodeGroupRequest', + 'TestIamPermissionsNodeTemplateRequest', + 'TestIamPermissionsPacketMirroringRequest', + 
'TestIamPermissionsRegionDiskRequest', + 'TestIamPermissionsRegionNetworkFirewallPolicyRequest', + 'TestIamPermissionsReservationRequest', + 'TestIamPermissionsResourcePolicyRequest', + 'TestIamPermissionsServiceAttachmentRequest', + 'TestIamPermissionsSnapshotRequest', + 'TestIamPermissionsSubnetworkRequest', + 'TestIamPermissionsVpnGatewayRequest', + 'TestPermissionsRequest', + 'TestPermissionsResponse', + 'Uint128', + 'UpdateAccessConfigInstanceRequest', + 'UpdateAutoscalerRequest', + 'UpdateBackendBucketRequest', + 'UpdateBackendServiceRequest', + 'UpdateDiskRequest', + 'UpdateDisplayDeviceInstanceRequest', + 'UpdateFirewallRequest', + 'UpdateHealthCheckRequest', + 'UpdateInstanceRequest', + 'UpdateNetworkInterfaceInstanceRequest', + 'UpdatePeeringNetworkRequest', + 'UpdatePerInstanceConfigsInstanceGroupManagerRequest', + 'UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest', + 'UpdateRegionAutoscalerRequest', + 'UpdateRegionBackendServiceRequest', + 'UpdateRegionCommitmentRequest', + 'UpdateRegionDiskRequest', + 'UpdateRegionHealthCheckRequest', + 'UpdateRegionUrlMapRequest', + 'UpdateReservationRequest', + 'UpdateRouterRequest', + 'UpdateShieldedInstanceConfigInstanceRequest', + 'UpdateUrlMapRequest', + 'UrlMap', + 'UrlMapList', + 'UrlMapReference', + 'UrlMapsAggregatedList', + 'UrlMapsScopedList', + 'UrlMapsValidateRequest', + 'UrlMapsValidateResponse', + 'UrlMapTest', + 'UrlMapTestHeader', + 'UrlMapValidationResult', + 'UrlRewrite', + 'UsableSubnetwork', + 'UsableSubnetworksAggregatedList', + 'UsableSubnetworkSecondaryRange', + 'UsageExportLocation', + 'ValidateRegionUrlMapRequest', + 'ValidateUrlMapRequest', + 'VmEndpointNatMappings', + 'VmEndpointNatMappingsInterfaceNatMappings', + 'VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings', + 'VmEndpointNatMappingsList', + 'VpnGateway', + 'VpnGatewayAggregatedList', + 'VpnGatewayList', + 'VpnGatewaysGetStatusResponse', + 'VpnGatewaysScopedList', + 'VpnGatewayStatus', + 
'VpnGatewayStatusHighAvailabilityRequirementState', + 'VpnGatewayStatusTunnel', + 'VpnGatewayStatusVpnConnection', + 'VpnGatewayVpnGatewayInterface', + 'VpnTunnel', + 'VpnTunnelAggregatedList', + 'VpnTunnelList', + 'VpnTunnelsScopedList', + 'WafExpressionSet', + 'WafExpressionSetExpression', + 'WaitGlobalOperationRequest', + 'WaitRegionOperationRequest', + 'WaitZoneOperationRequest', + 'Warning', + 'Warnings', + 'WeightedBackendService', + 'XpnHostList', + 'XpnResourceId', + 'Zone', + 'ZoneList', + 'ZoneSetLabelsRequest', + 'ZoneSetPolicyRequest', +) diff --git a/owl-bot-staging/v1/google/cloud/compute/gapic_version.py b/owl-bot-staging/v1/google/cloud/compute/gapic_version.py new file mode 100644 index 000000000..360a0d13e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/compute/py.typed b/owl-bot-staging/v1/google/cloud/compute/py.typed new file mode 100644 index 000000000..071da5269 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-compute package uses inline types. 
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/__init__.py new file mode 100644 index 000000000..0a6f14f78 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/__init__.py @@ -0,0 +1,2932 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.compute_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.accelerator_types import AcceleratorTypesClient +from .services.addresses import AddressesClient +from .services.autoscalers import AutoscalersClient +from .services.backend_buckets import BackendBucketsClient +from .services.backend_services import BackendServicesClient +from .services.disks import DisksClient +from .services.disk_types import DiskTypesClient +from .services.external_vpn_gateways import ExternalVpnGatewaysClient +from .services.firewall_policies import FirewallPoliciesClient +from .services.firewalls import FirewallsClient +from .services.forwarding_rules import ForwardingRulesClient +from .services.global_addresses import GlobalAddressesClient +from .services.global_forwarding_rules import GlobalForwardingRulesClient +from .services.global_network_endpoint_groups import GlobalNetworkEndpointGroupsClient +from .services.global_operations import GlobalOperationsClient +from .services.global_organization_operations import GlobalOrganizationOperationsClient 
+from .services.global_public_delegated_prefixes import GlobalPublicDelegatedPrefixesClient +from .services.health_checks import HealthChecksClient +from .services.image_family_views import ImageFamilyViewsClient +from .services.images import ImagesClient +from .services.instance_group_managers import InstanceGroupManagersClient +from .services.instance_groups import InstanceGroupsClient +from .services.instances import InstancesClient +from .services.instance_templates import InstanceTemplatesClient +from .services.interconnect_attachments import InterconnectAttachmentsClient +from .services.interconnect_locations import InterconnectLocationsClient +from .services.interconnect_remote_locations import InterconnectRemoteLocationsClient +from .services.interconnects import InterconnectsClient +from .services.license_codes import LicenseCodesClient +from .services.licenses import LicensesClient +from .services.machine_images import MachineImagesClient +from .services.machine_types import MachineTypesClient +from .services.network_attachments import NetworkAttachmentsClient +from .services.network_edge_security_services import NetworkEdgeSecurityServicesClient +from .services.network_endpoint_groups import NetworkEndpointGroupsClient +from .services.network_firewall_policies import NetworkFirewallPoliciesClient +from .services.networks import NetworksClient +from .services.node_groups import NodeGroupsClient +from .services.node_templates import NodeTemplatesClient +from .services.node_types import NodeTypesClient +from .services.packet_mirrorings import PacketMirroringsClient +from .services.projects import ProjectsClient +from .services.public_advertised_prefixes import PublicAdvertisedPrefixesClient +from .services.public_delegated_prefixes import PublicDelegatedPrefixesClient +from .services.region_autoscalers import RegionAutoscalersClient +from .services.region_backend_services import RegionBackendServicesClient +from .services.region_commitments import 
RegionCommitmentsClient +from .services.region_disks import RegionDisksClient +from .services.region_disk_types import RegionDiskTypesClient +from .services.region_health_checks import RegionHealthChecksClient +from .services.region_health_check_services import RegionHealthCheckServicesClient +from .services.region_instance_group_managers import RegionInstanceGroupManagersClient +from .services.region_instance_groups import RegionInstanceGroupsClient +from .services.region_instances import RegionInstancesClient +from .services.region_instance_templates import RegionInstanceTemplatesClient +from .services.region_network_endpoint_groups import RegionNetworkEndpointGroupsClient +from .services.region_network_firewall_policies import RegionNetworkFirewallPoliciesClient +from .services.region_notification_endpoints import RegionNotificationEndpointsClient +from .services.region_operations import RegionOperationsClient +from .services.regions import RegionsClient +from .services.region_security_policies import RegionSecurityPoliciesClient +from .services.region_ssl_certificates import RegionSslCertificatesClient +from .services.region_ssl_policies import RegionSslPoliciesClient +from .services.region_target_http_proxies import RegionTargetHttpProxiesClient +from .services.region_target_https_proxies import RegionTargetHttpsProxiesClient +from .services.region_target_tcp_proxies import RegionTargetTcpProxiesClient +from .services.region_url_maps import RegionUrlMapsClient +from .services.reservations import ReservationsClient +from .services.resource_policies import ResourcePoliciesClient +from .services.routers import RoutersClient +from .services.routes import RoutesClient +from .services.security_policies import SecurityPoliciesClient +from .services.service_attachments import ServiceAttachmentsClient +from .services.snapshots import SnapshotsClient +from .services.ssl_certificates import SslCertificatesClient +from .services.ssl_policies import SslPoliciesClient +from 
.services.subnetworks import SubnetworksClient +from .services.target_grpc_proxies import TargetGrpcProxiesClient +from .services.target_http_proxies import TargetHttpProxiesClient +from .services.target_https_proxies import TargetHttpsProxiesClient +from .services.target_instances import TargetInstancesClient +from .services.target_pools import TargetPoolsClient +from .services.target_ssl_proxies import TargetSslProxiesClient +from .services.target_tcp_proxies import TargetTcpProxiesClient +from .services.target_vpn_gateways import TargetVpnGatewaysClient +from .services.url_maps import UrlMapsClient +from .services.vpn_gateways import VpnGatewaysClient +from .services.vpn_tunnels import VpnTunnelsClient +from .services.zone_operations import ZoneOperationsClient +from .services.zones import ZonesClient + +from .types.compute import AbandonInstancesInstanceGroupManagerRequest +from .types.compute import AbandonInstancesRegionInstanceGroupManagerRequest +from .types.compute import AcceleratorConfig +from .types.compute import Accelerators +from .types.compute import AcceleratorType +from .types.compute import AcceleratorTypeAggregatedList +from .types.compute import AcceleratorTypeList +from .types.compute import AcceleratorTypesScopedList +from .types.compute import AccessConfig +from .types.compute import AddAccessConfigInstanceRequest +from .types.compute import AddAssociationFirewallPolicyRequest +from .types.compute import AddAssociationNetworkFirewallPolicyRequest +from .types.compute import AddAssociationRegionNetworkFirewallPolicyRequest +from .types.compute import AddHealthCheckTargetPoolRequest +from .types.compute import AddInstancesInstanceGroupRequest +from .types.compute import AddInstanceTargetPoolRequest +from .types.compute import AddNodesNodeGroupRequest +from .types.compute import AddPeeringNetworkRequest +from .types.compute import AddResourcePoliciesDiskRequest +from .types.compute import AddResourcePoliciesInstanceRequest +from .types.compute 
import AddResourcePoliciesRegionDiskRequest +from .types.compute import Address +from .types.compute import AddressAggregatedList +from .types.compute import AddressesScopedList +from .types.compute import AddressList +from .types.compute import AddRuleFirewallPolicyRequest +from .types.compute import AddRuleNetworkFirewallPolicyRequest +from .types.compute import AddRuleRegionNetworkFirewallPolicyRequest +from .types.compute import AddRuleSecurityPolicyRequest +from .types.compute import AddSignedUrlKeyBackendBucketRequest +from .types.compute import AddSignedUrlKeyBackendServiceRequest +from .types.compute import AdvancedMachineFeatures +from .types.compute import AggregatedListAcceleratorTypesRequest +from .types.compute import AggregatedListAddressesRequest +from .types.compute import AggregatedListAutoscalersRequest +from .types.compute import AggregatedListBackendServicesRequest +from .types.compute import AggregatedListDisksRequest +from .types.compute import AggregatedListDiskTypesRequest +from .types.compute import AggregatedListForwardingRulesRequest +from .types.compute import AggregatedListGlobalOperationsRequest +from .types.compute import AggregatedListHealthChecksRequest +from .types.compute import AggregatedListInstanceGroupManagersRequest +from .types.compute import AggregatedListInstanceGroupsRequest +from .types.compute import AggregatedListInstancesRequest +from .types.compute import AggregatedListInstanceTemplatesRequest +from .types.compute import AggregatedListInterconnectAttachmentsRequest +from .types.compute import AggregatedListMachineTypesRequest +from .types.compute import AggregatedListNetworkAttachmentsRequest +from .types.compute import AggregatedListNetworkEdgeSecurityServicesRequest +from .types.compute import AggregatedListNetworkEndpointGroupsRequest +from .types.compute import AggregatedListNodeGroupsRequest +from .types.compute import AggregatedListNodeTemplatesRequest +from .types.compute import AggregatedListNodeTypesRequest 
+from .types.compute import AggregatedListPacketMirroringsRequest +from .types.compute import AggregatedListPublicDelegatedPrefixesRequest +from .types.compute import AggregatedListRegionCommitmentsRequest +from .types.compute import AggregatedListReservationsRequest +from .types.compute import AggregatedListResourcePoliciesRequest +from .types.compute import AggregatedListRoutersRequest +from .types.compute import AggregatedListSecurityPoliciesRequest +from .types.compute import AggregatedListServiceAttachmentsRequest +from .types.compute import AggregatedListSslCertificatesRequest +from .types.compute import AggregatedListSslPoliciesRequest +from .types.compute import AggregatedListSubnetworksRequest +from .types.compute import AggregatedListTargetHttpProxiesRequest +from .types.compute import AggregatedListTargetHttpsProxiesRequest +from .types.compute import AggregatedListTargetInstancesRequest +from .types.compute import AggregatedListTargetPoolsRequest +from .types.compute import AggregatedListTargetTcpProxiesRequest +from .types.compute import AggregatedListTargetVpnGatewaysRequest +from .types.compute import AggregatedListUrlMapsRequest +from .types.compute import AggregatedListVpnGatewaysRequest +from .types.compute import AggregatedListVpnTunnelsRequest +from .types.compute import AliasIpRange +from .types.compute import AllocationResourceStatus +from .types.compute import AllocationResourceStatusSpecificSKUAllocation +from .types.compute import AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk +from .types.compute import AllocationSpecificSKUAllocationReservedInstanceProperties +from .types.compute import AllocationSpecificSKUReservation +from .types.compute import Allowed +from .types.compute import ApplyUpdatesToInstancesInstanceGroupManagerRequest +from .types.compute import ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest +from .types.compute import AttachDiskInstanceRequest +from .types.compute import AttachedDisk +from 
.types.compute import AttachedDiskInitializeParams +from .types.compute import AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest +from .types.compute import AttachNetworkEndpointsNetworkEndpointGroupRequest +from .types.compute import AuditConfig +from .types.compute import AuditLogConfig +from .types.compute import AuthorizationLoggingOptions +from .types.compute import Autoscaler +from .types.compute import AutoscalerAggregatedList +from .types.compute import AutoscalerList +from .types.compute import AutoscalersScopedList +from .types.compute import AutoscalerStatusDetails +from .types.compute import AutoscalingPolicy +from .types.compute import AutoscalingPolicyCpuUtilization +from .types.compute import AutoscalingPolicyCustomMetricUtilization +from .types.compute import AutoscalingPolicyLoadBalancingUtilization +from .types.compute import AutoscalingPolicyScaleInControl +from .types.compute import AutoscalingPolicyScalingSchedule +from .types.compute import Backend +from .types.compute import BackendBucket +from .types.compute import BackendBucketCdnPolicy +from .types.compute import BackendBucketCdnPolicyBypassCacheOnRequestHeader +from .types.compute import BackendBucketCdnPolicyCacheKeyPolicy +from .types.compute import BackendBucketCdnPolicyNegativeCachingPolicy +from .types.compute import BackendBucketList +from .types.compute import BackendService +from .types.compute import BackendServiceAggregatedList +from .types.compute import BackendServiceCdnPolicy +from .types.compute import BackendServiceCdnPolicyBypassCacheOnRequestHeader +from .types.compute import BackendServiceCdnPolicyNegativeCachingPolicy +from .types.compute import BackendServiceConnectionTrackingPolicy +from .types.compute import BackendServiceFailoverPolicy +from .types.compute import BackendServiceGroupHealth +from .types.compute import BackendServiceIAP +from .types.compute import BackendServiceList +from .types.compute import BackendServiceLocalityLoadBalancingPolicyConfig +from 
.types.compute import BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy +from .types.compute import BackendServiceLocalityLoadBalancingPolicyConfigPolicy +from .types.compute import BackendServiceLogConfig +from .types.compute import BackendServiceReference +from .types.compute import BackendServicesScopedList +from .types.compute import BfdPacket +from .types.compute import BfdStatus +from .types.compute import BfdStatusPacketCounts +from .types.compute import Binding +from .types.compute import BulkInsertDiskRequest +from .types.compute import BulkInsertDiskResource +from .types.compute import BulkInsertInstanceRequest +from .types.compute import BulkInsertInstanceResource +from .types.compute import BulkInsertInstanceResourcePerInstanceProperties +from .types.compute import BulkInsertRegionDiskRequest +from .types.compute import BulkInsertRegionInstanceRequest +from .types.compute import CacheInvalidationRule +from .types.compute import CacheKeyPolicy +from .types.compute import CircuitBreakers +from .types.compute import CloneRulesFirewallPolicyRequest +from .types.compute import CloneRulesNetworkFirewallPolicyRequest +from .types.compute import CloneRulesRegionNetworkFirewallPolicyRequest +from .types.compute import Commitment +from .types.compute import CommitmentAggregatedList +from .types.compute import CommitmentList +from .types.compute import CommitmentsScopedList +from .types.compute import Condition +from .types.compute import ConfidentialInstanceConfig +from .types.compute import ConnectionDraining +from .types.compute import ConsistentHashLoadBalancerSettings +from .types.compute import ConsistentHashLoadBalancerSettingsHttpCookie +from .types.compute import CorsPolicy +from .types.compute import CreateInstancesInstanceGroupManagerRequest +from .types.compute import CreateInstancesRegionInstanceGroupManagerRequest +from .types.compute import CreateSnapshotDiskRequest +from .types.compute import CreateSnapshotRegionDiskRequest +from 
.types.compute import CustomerEncryptionKey +from .types.compute import CustomerEncryptionKeyProtectedDisk +from .types.compute import Data +from .types.compute import DeleteAccessConfigInstanceRequest +from .types.compute import DeleteAddressRequest +from .types.compute import DeleteAutoscalerRequest +from .types.compute import DeleteBackendBucketRequest +from .types.compute import DeleteBackendServiceRequest +from .types.compute import DeleteDiskRequest +from .types.compute import DeleteExternalVpnGatewayRequest +from .types.compute import DeleteFirewallPolicyRequest +from .types.compute import DeleteFirewallRequest +from .types.compute import DeleteForwardingRuleRequest +from .types.compute import DeleteGlobalAddressRequest +from .types.compute import DeleteGlobalForwardingRuleRequest +from .types.compute import DeleteGlobalNetworkEndpointGroupRequest +from .types.compute import DeleteGlobalOperationRequest +from .types.compute import DeleteGlobalOperationResponse +from .types.compute import DeleteGlobalOrganizationOperationRequest +from .types.compute import DeleteGlobalOrganizationOperationResponse +from .types.compute import DeleteGlobalPublicDelegatedPrefixeRequest +from .types.compute import DeleteHealthCheckRequest +from .types.compute import DeleteImageRequest +from .types.compute import DeleteInstanceGroupManagerRequest +from .types.compute import DeleteInstanceGroupRequest +from .types.compute import DeleteInstanceRequest +from .types.compute import DeleteInstancesInstanceGroupManagerRequest +from .types.compute import DeleteInstancesRegionInstanceGroupManagerRequest +from .types.compute import DeleteInstanceTemplateRequest +from .types.compute import DeleteInterconnectAttachmentRequest +from .types.compute import DeleteInterconnectRequest +from .types.compute import DeleteLicenseRequest +from .types.compute import DeleteMachineImageRequest +from .types.compute import DeleteNetworkAttachmentRequest +from .types.compute import 
DeleteNetworkEdgeSecurityServiceRequest +from .types.compute import DeleteNetworkEndpointGroupRequest +from .types.compute import DeleteNetworkFirewallPolicyRequest +from .types.compute import DeleteNetworkRequest +from .types.compute import DeleteNodeGroupRequest +from .types.compute import DeleteNodesNodeGroupRequest +from .types.compute import DeleteNodeTemplateRequest +from .types.compute import DeletePacketMirroringRequest +from .types.compute import DeletePerInstanceConfigsInstanceGroupManagerRequest +from .types.compute import DeletePerInstanceConfigsRegionInstanceGroupManagerRequest +from .types.compute import DeletePublicAdvertisedPrefixeRequest +from .types.compute import DeletePublicDelegatedPrefixeRequest +from .types.compute import DeleteRegionAutoscalerRequest +from .types.compute import DeleteRegionBackendServiceRequest +from .types.compute import DeleteRegionDiskRequest +from .types.compute import DeleteRegionHealthCheckRequest +from .types.compute import DeleteRegionHealthCheckServiceRequest +from .types.compute import DeleteRegionInstanceGroupManagerRequest +from .types.compute import DeleteRegionInstanceTemplateRequest +from .types.compute import DeleteRegionNetworkEndpointGroupRequest +from .types.compute import DeleteRegionNetworkFirewallPolicyRequest +from .types.compute import DeleteRegionNotificationEndpointRequest +from .types.compute import DeleteRegionOperationRequest +from .types.compute import DeleteRegionOperationResponse +from .types.compute import DeleteRegionSecurityPolicyRequest +from .types.compute import DeleteRegionSslCertificateRequest +from .types.compute import DeleteRegionSslPolicyRequest +from .types.compute import DeleteRegionTargetHttpProxyRequest +from .types.compute import DeleteRegionTargetHttpsProxyRequest +from .types.compute import DeleteRegionTargetTcpProxyRequest +from .types.compute import DeleteRegionUrlMapRequest +from .types.compute import DeleteReservationRequest +from .types.compute import 
DeleteResourcePolicyRequest +from .types.compute import DeleteRouteRequest +from .types.compute import DeleteRouterRequest +from .types.compute import DeleteSecurityPolicyRequest +from .types.compute import DeleteServiceAttachmentRequest +from .types.compute import DeleteSignedUrlKeyBackendBucketRequest +from .types.compute import DeleteSignedUrlKeyBackendServiceRequest +from .types.compute import DeleteSnapshotRequest +from .types.compute import DeleteSslCertificateRequest +from .types.compute import DeleteSslPolicyRequest +from .types.compute import DeleteSubnetworkRequest +from .types.compute import DeleteTargetGrpcProxyRequest +from .types.compute import DeleteTargetHttpProxyRequest +from .types.compute import DeleteTargetHttpsProxyRequest +from .types.compute import DeleteTargetInstanceRequest +from .types.compute import DeleteTargetPoolRequest +from .types.compute import DeleteTargetSslProxyRequest +from .types.compute import DeleteTargetTcpProxyRequest +from .types.compute import DeleteTargetVpnGatewayRequest +from .types.compute import DeleteUrlMapRequest +from .types.compute import DeleteVpnGatewayRequest +from .types.compute import DeleteVpnTunnelRequest +from .types.compute import DeleteZoneOperationRequest +from .types.compute import DeleteZoneOperationResponse +from .types.compute import Denied +from .types.compute import DeprecateImageRequest +from .types.compute import DeprecationStatus +from .types.compute import DetachDiskInstanceRequest +from .types.compute import DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest +from .types.compute import DetachNetworkEndpointsNetworkEndpointGroupRequest +from .types.compute import DisableXpnHostProjectRequest +from .types.compute import DisableXpnResourceProjectRequest +from .types.compute import Disk +from .types.compute import DiskAggregatedList +from .types.compute import DiskAsyncReplication +from .types.compute import DiskAsyncReplicationList +from .types.compute import DiskInstantiationConfig +from 
.types.compute import DiskList +from .types.compute import DiskMoveRequest +from .types.compute import DiskParams +from .types.compute import DiskResourceStatus +from .types.compute import DiskResourceStatusAsyncReplicationStatus +from .types.compute import DisksAddResourcePoliciesRequest +from .types.compute import DisksRemoveResourcePoliciesRequest +from .types.compute import DisksResizeRequest +from .types.compute import DisksScopedList +from .types.compute import DisksStartAsyncReplicationRequest +from .types.compute import DisksStopGroupAsyncReplicationResource +from .types.compute import DiskType +from .types.compute import DiskTypeAggregatedList +from .types.compute import DiskTypeList +from .types.compute import DiskTypesScopedList +from .types.compute import DisplayDevice +from .types.compute import DistributionPolicy +from .types.compute import DistributionPolicyZoneConfiguration +from .types.compute import Duration +from .types.compute import EnableXpnHostProjectRequest +from .types.compute import EnableXpnResourceProjectRequest +from .types.compute import Error +from .types.compute import ErrorDetails +from .types.compute import ErrorInfo +from .types.compute import Errors +from .types.compute import ExchangedPeeringRoute +from .types.compute import ExchangedPeeringRoutesList +from .types.compute import ExpandIpCidrRangeSubnetworkRequest +from .types.compute import Expr +from .types.compute import ExternalVpnGateway +from .types.compute import ExternalVpnGatewayInterface +from .types.compute import ExternalVpnGatewayList +from .types.compute import FileContentBuffer +from .types.compute import Firewall +from .types.compute import FirewallList +from .types.compute import FirewallLogConfig +from .types.compute import FirewallPoliciesListAssociationsResponse +from .types.compute import FirewallPolicy +from .types.compute import FirewallPolicyAssociation +from .types.compute import FirewallPolicyList +from .types.compute import FirewallPolicyRule +from 
.types.compute import FirewallPolicyRuleMatcher +from .types.compute import FirewallPolicyRuleMatcherLayer4Config +from .types.compute import FirewallPolicyRuleSecureTag +from .types.compute import FixedOrPercent +from .types.compute import ForwardingRule +from .types.compute import ForwardingRuleAggregatedList +from .types.compute import ForwardingRuleList +from .types.compute import ForwardingRuleReference +from .types.compute import ForwardingRuleServiceDirectoryRegistration +from .types.compute import ForwardingRulesScopedList +from .types.compute import GetAcceleratorTypeRequest +from .types.compute import GetAddressRequest +from .types.compute import GetAssociationFirewallPolicyRequest +from .types.compute import GetAssociationNetworkFirewallPolicyRequest +from .types.compute import GetAssociationRegionNetworkFirewallPolicyRequest +from .types.compute import GetAutoscalerRequest +from .types.compute import GetBackendBucketRequest +from .types.compute import GetBackendServiceRequest +from .types.compute import GetDiagnosticsInterconnectRequest +from .types.compute import GetDiskRequest +from .types.compute import GetDiskTypeRequest +from .types.compute import GetEffectiveFirewallsInstanceRequest +from .types.compute import GetEffectiveFirewallsNetworkRequest +from .types.compute import GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest +from .types.compute import GetExternalVpnGatewayRequest +from .types.compute import GetFirewallPolicyRequest +from .types.compute import GetFirewallRequest +from .types.compute import GetForwardingRuleRequest +from .types.compute import GetFromFamilyImageRequest +from .types.compute import GetGlobalAddressRequest +from .types.compute import GetGlobalForwardingRuleRequest +from .types.compute import GetGlobalNetworkEndpointGroupRequest +from .types.compute import GetGlobalOperationRequest +from .types.compute import GetGlobalOrganizationOperationRequest +from .types.compute import GetGlobalPublicDelegatedPrefixeRequest 
+from .types.compute import GetGuestAttributesInstanceRequest +from .types.compute import GetHealthBackendServiceRequest +from .types.compute import GetHealthCheckRequest +from .types.compute import GetHealthRegionBackendServiceRequest +from .types.compute import GetHealthTargetPoolRequest +from .types.compute import GetIamPolicyBackendServiceRequest +from .types.compute import GetIamPolicyDiskRequest +from .types.compute import GetIamPolicyFirewallPolicyRequest +from .types.compute import GetIamPolicyImageRequest +from .types.compute import GetIamPolicyInstanceRequest +from .types.compute import GetIamPolicyInstanceTemplateRequest +from .types.compute import GetIamPolicyLicenseRequest +from .types.compute import GetIamPolicyMachineImageRequest +from .types.compute import GetIamPolicyNetworkAttachmentRequest +from .types.compute import GetIamPolicyNetworkFirewallPolicyRequest +from .types.compute import GetIamPolicyNodeGroupRequest +from .types.compute import GetIamPolicyNodeTemplateRequest +from .types.compute import GetIamPolicyRegionBackendServiceRequest +from .types.compute import GetIamPolicyRegionDiskRequest +from .types.compute import GetIamPolicyRegionNetworkFirewallPolicyRequest +from .types.compute import GetIamPolicyReservationRequest +from .types.compute import GetIamPolicyResourcePolicyRequest +from .types.compute import GetIamPolicyServiceAttachmentRequest +from .types.compute import GetIamPolicySnapshotRequest +from .types.compute import GetIamPolicySubnetworkRequest +from .types.compute import GetImageFamilyViewRequest +from .types.compute import GetImageRequest +from .types.compute import GetInstanceGroupManagerRequest +from .types.compute import GetInstanceGroupRequest +from .types.compute import GetInstanceRequest +from .types.compute import GetInstanceTemplateRequest +from .types.compute import GetInterconnectAttachmentRequest +from .types.compute import GetInterconnectLocationRequest +from .types.compute import 
GetInterconnectRemoteLocationRequest +from .types.compute import GetInterconnectRequest +from .types.compute import GetLicenseCodeRequest +from .types.compute import GetLicenseRequest +from .types.compute import GetMachineImageRequest +from .types.compute import GetMachineTypeRequest +from .types.compute import GetNatMappingInfoRoutersRequest +from .types.compute import GetNetworkAttachmentRequest +from .types.compute import GetNetworkEdgeSecurityServiceRequest +from .types.compute import GetNetworkEndpointGroupRequest +from .types.compute import GetNetworkFirewallPolicyRequest +from .types.compute import GetNetworkRequest +from .types.compute import GetNodeGroupRequest +from .types.compute import GetNodeTemplateRequest +from .types.compute import GetNodeTypeRequest +from .types.compute import GetPacketMirroringRequest +from .types.compute import GetProjectRequest +from .types.compute import GetPublicAdvertisedPrefixeRequest +from .types.compute import GetPublicDelegatedPrefixeRequest +from .types.compute import GetRegionAutoscalerRequest +from .types.compute import GetRegionBackendServiceRequest +from .types.compute import GetRegionCommitmentRequest +from .types.compute import GetRegionDiskRequest +from .types.compute import GetRegionDiskTypeRequest +from .types.compute import GetRegionHealthCheckRequest +from .types.compute import GetRegionHealthCheckServiceRequest +from .types.compute import GetRegionInstanceGroupManagerRequest +from .types.compute import GetRegionInstanceGroupRequest +from .types.compute import GetRegionInstanceTemplateRequest +from .types.compute import GetRegionNetworkEndpointGroupRequest +from .types.compute import GetRegionNetworkFirewallPolicyRequest +from .types.compute import GetRegionNotificationEndpointRequest +from .types.compute import GetRegionOperationRequest +from .types.compute import GetRegionRequest +from .types.compute import GetRegionSecurityPolicyRequest +from .types.compute import GetRegionSslCertificateRequest +from 
.types.compute import GetRegionSslPolicyRequest +from .types.compute import GetRegionTargetHttpProxyRequest +from .types.compute import GetRegionTargetHttpsProxyRequest +from .types.compute import GetRegionTargetTcpProxyRequest +from .types.compute import GetRegionUrlMapRequest +from .types.compute import GetReservationRequest +from .types.compute import GetResourcePolicyRequest +from .types.compute import GetRouteRequest +from .types.compute import GetRouterRequest +from .types.compute import GetRouterStatusRouterRequest +from .types.compute import GetRuleFirewallPolicyRequest +from .types.compute import GetRuleNetworkFirewallPolicyRequest +from .types.compute import GetRuleRegionNetworkFirewallPolicyRequest +from .types.compute import GetRuleSecurityPolicyRequest +from .types.compute import GetScreenshotInstanceRequest +from .types.compute import GetSecurityPolicyRequest +from .types.compute import GetSerialPortOutputInstanceRequest +from .types.compute import GetServiceAttachmentRequest +from .types.compute import GetShieldedInstanceIdentityInstanceRequest +from .types.compute import GetSnapshotRequest +from .types.compute import GetSslCertificateRequest +from .types.compute import GetSslPolicyRequest +from .types.compute import GetStatusVpnGatewayRequest +from .types.compute import GetSubnetworkRequest +from .types.compute import GetTargetGrpcProxyRequest +from .types.compute import GetTargetHttpProxyRequest +from .types.compute import GetTargetHttpsProxyRequest +from .types.compute import GetTargetInstanceRequest +from .types.compute import GetTargetPoolRequest +from .types.compute import GetTargetSslProxyRequest +from .types.compute import GetTargetTcpProxyRequest +from .types.compute import GetTargetVpnGatewayRequest +from .types.compute import GetUrlMapRequest +from .types.compute import GetVpnGatewayRequest +from .types.compute import GetVpnTunnelRequest +from .types.compute import GetXpnHostProjectRequest +from .types.compute import 
GetXpnResourcesProjectsRequest +from .types.compute import GetZoneOperationRequest +from .types.compute import GetZoneRequest +from .types.compute import GlobalAddressesMoveRequest +from .types.compute import GlobalNetworkEndpointGroupsAttachEndpointsRequest +from .types.compute import GlobalNetworkEndpointGroupsDetachEndpointsRequest +from .types.compute import GlobalOrganizationSetPolicyRequest +from .types.compute import GlobalSetLabelsRequest +from .types.compute import GlobalSetPolicyRequest +from .types.compute import GRPCHealthCheck +from .types.compute import GuestAttributes +from .types.compute import GuestAttributesEntry +from .types.compute import GuestAttributesValue +from .types.compute import GuestOsFeature +from .types.compute import HealthCheck +from .types.compute import HealthCheckList +from .types.compute import HealthCheckLogConfig +from .types.compute import HealthCheckReference +from .types.compute import HealthChecksAggregatedList +from .types.compute import HealthCheckService +from .types.compute import HealthCheckServiceReference +from .types.compute import HealthCheckServicesList +from .types.compute import HealthChecksScopedList +from .types.compute import HealthStatus +from .types.compute import HealthStatusForNetworkEndpoint +from .types.compute import Help +from .types.compute import HelpLink +from .types.compute import HostRule +from .types.compute import HTTP2HealthCheck +from .types.compute import HttpFaultAbort +from .types.compute import HttpFaultDelay +from .types.compute import HttpFaultInjection +from .types.compute import HttpHeaderAction +from .types.compute import HttpHeaderMatch +from .types.compute import HttpHeaderOption +from .types.compute import HTTPHealthCheck +from .types.compute import HttpQueryParameterMatch +from .types.compute import HttpRedirectAction +from .types.compute import HttpRetryPolicy +from .types.compute import HttpRouteAction +from .types.compute import HttpRouteRule +from .types.compute import 
HttpRouteRuleMatch +from .types.compute import HTTPSHealthCheck +from .types.compute import Image +from .types.compute import ImageFamilyView +from .types.compute import ImageList +from .types.compute import InitialStateConfig +from .types.compute import InsertAddressRequest +from .types.compute import InsertAutoscalerRequest +from .types.compute import InsertBackendBucketRequest +from .types.compute import InsertBackendServiceRequest +from .types.compute import InsertDiskRequest +from .types.compute import InsertExternalVpnGatewayRequest +from .types.compute import InsertFirewallPolicyRequest +from .types.compute import InsertFirewallRequest +from .types.compute import InsertForwardingRuleRequest +from .types.compute import InsertGlobalAddressRequest +from .types.compute import InsertGlobalForwardingRuleRequest +from .types.compute import InsertGlobalNetworkEndpointGroupRequest +from .types.compute import InsertGlobalPublicDelegatedPrefixeRequest +from .types.compute import InsertHealthCheckRequest +from .types.compute import InsertImageRequest +from .types.compute import InsertInstanceGroupManagerRequest +from .types.compute import InsertInstanceGroupRequest +from .types.compute import InsertInstanceRequest +from .types.compute import InsertInstanceTemplateRequest +from .types.compute import InsertInterconnectAttachmentRequest +from .types.compute import InsertInterconnectRequest +from .types.compute import InsertLicenseRequest +from .types.compute import InsertMachineImageRequest +from .types.compute import InsertNetworkAttachmentRequest +from .types.compute import InsertNetworkEdgeSecurityServiceRequest +from .types.compute import InsertNetworkEndpointGroupRequest +from .types.compute import InsertNetworkFirewallPolicyRequest +from .types.compute import InsertNetworkRequest +from .types.compute import InsertNodeGroupRequest +from .types.compute import InsertNodeTemplateRequest +from .types.compute import InsertPacketMirroringRequest +from .types.compute import 
InsertPublicAdvertisedPrefixeRequest +from .types.compute import InsertPublicDelegatedPrefixeRequest +from .types.compute import InsertRegionAutoscalerRequest +from .types.compute import InsertRegionBackendServiceRequest +from .types.compute import InsertRegionCommitmentRequest +from .types.compute import InsertRegionDiskRequest +from .types.compute import InsertRegionHealthCheckRequest +from .types.compute import InsertRegionHealthCheckServiceRequest +from .types.compute import InsertRegionInstanceGroupManagerRequest +from .types.compute import InsertRegionInstanceTemplateRequest +from .types.compute import InsertRegionNetworkEndpointGroupRequest +from .types.compute import InsertRegionNetworkFirewallPolicyRequest +from .types.compute import InsertRegionNotificationEndpointRequest +from .types.compute import InsertRegionSecurityPolicyRequest +from .types.compute import InsertRegionSslCertificateRequest +from .types.compute import InsertRegionSslPolicyRequest +from .types.compute import InsertRegionTargetHttpProxyRequest +from .types.compute import InsertRegionTargetHttpsProxyRequest +from .types.compute import InsertRegionTargetTcpProxyRequest +from .types.compute import InsertRegionUrlMapRequest +from .types.compute import InsertReservationRequest +from .types.compute import InsertResourcePolicyRequest +from .types.compute import InsertRouteRequest +from .types.compute import InsertRouterRequest +from .types.compute import InsertSecurityPolicyRequest +from .types.compute import InsertServiceAttachmentRequest +from .types.compute import InsertSnapshotRequest +from .types.compute import InsertSslCertificateRequest +from .types.compute import InsertSslPolicyRequest +from .types.compute import InsertSubnetworkRequest +from .types.compute import InsertTargetGrpcProxyRequest +from .types.compute import InsertTargetHttpProxyRequest +from .types.compute import InsertTargetHttpsProxyRequest +from .types.compute import InsertTargetInstanceRequest +from .types.compute 
import InsertTargetPoolRequest +from .types.compute import InsertTargetSslProxyRequest +from .types.compute import InsertTargetTcpProxyRequest +from .types.compute import InsertTargetVpnGatewayRequest +from .types.compute import InsertUrlMapRequest +from .types.compute import InsertVpnGatewayRequest +from .types.compute import InsertVpnTunnelRequest +from .types.compute import Instance +from .types.compute import InstanceAggregatedList +from .types.compute import InstanceConsumptionData +from .types.compute import InstanceConsumptionInfo +from .types.compute import InstanceGroup +from .types.compute import InstanceGroupAggregatedList +from .types.compute import InstanceGroupList +from .types.compute import InstanceGroupManager +from .types.compute import InstanceGroupManagerActionsSummary +from .types.compute import InstanceGroupManagerAggregatedList +from .types.compute import InstanceGroupManagerAutoHealingPolicy +from .types.compute import InstanceGroupManagerInstanceLifecyclePolicy +from .types.compute import InstanceGroupManagerList +from .types.compute import InstanceGroupManagersAbandonInstancesRequest +from .types.compute import InstanceGroupManagersApplyUpdatesRequest +from .types.compute import InstanceGroupManagersCreateInstancesRequest +from .types.compute import InstanceGroupManagersDeleteInstancesRequest +from .types.compute import InstanceGroupManagersDeletePerInstanceConfigsReq +from .types.compute import InstanceGroupManagersListErrorsResponse +from .types.compute import InstanceGroupManagersListManagedInstancesResponse +from .types.compute import InstanceGroupManagersListPerInstanceConfigsResp +from .types.compute import InstanceGroupManagersPatchPerInstanceConfigsReq +from .types.compute import InstanceGroupManagersRecreateInstancesRequest +from .types.compute import InstanceGroupManagersScopedList +from .types.compute import InstanceGroupManagersSetInstanceTemplateRequest +from .types.compute import InstanceGroupManagersSetTargetPoolsRequest 
+from .types.compute import InstanceGroupManagerStatus +from .types.compute import InstanceGroupManagerStatusStateful +from .types.compute import InstanceGroupManagerStatusStatefulPerInstanceConfigs +from .types.compute import InstanceGroupManagerStatusVersionTarget +from .types.compute import InstanceGroupManagersUpdatePerInstanceConfigsReq +from .types.compute import InstanceGroupManagerUpdatePolicy +from .types.compute import InstanceGroupManagerVersion +from .types.compute import InstanceGroupsAddInstancesRequest +from .types.compute import InstanceGroupsListInstances +from .types.compute import InstanceGroupsListInstancesRequest +from .types.compute import InstanceGroupsRemoveInstancesRequest +from .types.compute import InstanceGroupsScopedList +from .types.compute import InstanceGroupsSetNamedPortsRequest +from .types.compute import InstanceList +from .types.compute import InstanceListReferrers +from .types.compute import InstanceManagedByIgmError +from .types.compute import InstanceManagedByIgmErrorInstanceActionDetails +from .types.compute import InstanceManagedByIgmErrorManagedInstanceError +from .types.compute import InstanceMoveRequest +from .types.compute import InstanceParams +from .types.compute import InstanceProperties +from .types.compute import InstanceReference +from .types.compute import InstancesAddResourcePoliciesRequest +from .types.compute import InstancesGetEffectiveFirewallsResponse +from .types.compute import InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy +from .types.compute import InstancesRemoveResourcePoliciesRequest +from .types.compute import InstancesScopedList +from .types.compute import InstancesSetLabelsRequest +from .types.compute import InstancesSetMachineResourcesRequest +from .types.compute import InstancesSetMachineTypeRequest +from .types.compute import InstancesSetMinCpuPlatformRequest +from .types.compute import InstancesSetNameRequest +from .types.compute import InstancesSetServiceAccountRequest +from 
.types.compute import InstancesStartWithEncryptionKeyRequest +from .types.compute import InstanceTemplate +from .types.compute import InstanceTemplateAggregatedList +from .types.compute import InstanceTemplateList +from .types.compute import InstanceTemplatesScopedList +from .types.compute import InstanceWithNamedPorts +from .types.compute import Int64RangeMatch +from .types.compute import Interconnect +from .types.compute import InterconnectAttachment +from .types.compute import InterconnectAttachmentAggregatedList +from .types.compute import InterconnectAttachmentConfigurationConstraints +from .types.compute import InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange +from .types.compute import InterconnectAttachmentList +from .types.compute import InterconnectAttachmentPartnerMetadata +from .types.compute import InterconnectAttachmentPrivateInfo +from .types.compute import InterconnectAttachmentsScopedList +from .types.compute import InterconnectCircuitInfo +from .types.compute import InterconnectDiagnostics +from .types.compute import InterconnectDiagnosticsARPEntry +from .types.compute import InterconnectDiagnosticsLinkLACPStatus +from .types.compute import InterconnectDiagnosticsLinkOpticalPower +from .types.compute import InterconnectDiagnosticsLinkStatus +from .types.compute import InterconnectList +from .types.compute import InterconnectLocation +from .types.compute import InterconnectLocationList +from .types.compute import InterconnectLocationRegionInfo +from .types.compute import InterconnectOutageNotification +from .types.compute import InterconnectRemoteLocation +from .types.compute import InterconnectRemoteLocationConstraints +from .types.compute import InterconnectRemoteLocationConstraintsSubnetLengthRange +from .types.compute import InterconnectRemoteLocationList +from .types.compute import InterconnectRemoteLocationPermittedConnections +from .types.compute import InterconnectsGetDiagnosticsResponse +from .types.compute import 
InvalidateCacheUrlMapRequest +from .types.compute import Items +from .types.compute import License +from .types.compute import LicenseCode +from .types.compute import LicenseCodeLicenseAlias +from .types.compute import LicenseResourceCommitment +from .types.compute import LicenseResourceRequirements +from .types.compute import LicensesListResponse +from .types.compute import ListAcceleratorTypesRequest +from .types.compute import ListAddressesRequest +from .types.compute import ListAssociationsFirewallPolicyRequest +from .types.compute import ListAutoscalersRequest +from .types.compute import ListAvailableFeaturesRegionSslPoliciesRequest +from .types.compute import ListAvailableFeaturesSslPoliciesRequest +from .types.compute import ListBackendBucketsRequest +from .types.compute import ListBackendServicesRequest +from .types.compute import ListDisksRequest +from .types.compute import ListDiskTypesRequest +from .types.compute import ListErrorsInstanceGroupManagersRequest +from .types.compute import ListErrorsRegionInstanceGroupManagersRequest +from .types.compute import ListExternalVpnGatewaysRequest +from .types.compute import ListFirewallPoliciesRequest +from .types.compute import ListFirewallsRequest +from .types.compute import ListForwardingRulesRequest +from .types.compute import ListGlobalAddressesRequest +from .types.compute import ListGlobalForwardingRulesRequest +from .types.compute import ListGlobalNetworkEndpointGroupsRequest +from .types.compute import ListGlobalOperationsRequest +from .types.compute import ListGlobalOrganizationOperationsRequest +from .types.compute import ListGlobalPublicDelegatedPrefixesRequest +from .types.compute import ListHealthChecksRequest +from .types.compute import ListImagesRequest +from .types.compute import ListInstanceGroupManagersRequest +from .types.compute import ListInstanceGroupsRequest +from .types.compute import ListInstancesInstanceGroupsRequest +from .types.compute import ListInstancesRegionInstanceGroupsRequest 
+from .types.compute import ListInstancesRequest +from .types.compute import ListInstanceTemplatesRequest +from .types.compute import ListInterconnectAttachmentsRequest +from .types.compute import ListInterconnectLocationsRequest +from .types.compute import ListInterconnectRemoteLocationsRequest +from .types.compute import ListInterconnectsRequest +from .types.compute import ListLicensesRequest +from .types.compute import ListMachineImagesRequest +from .types.compute import ListMachineTypesRequest +from .types.compute import ListManagedInstancesInstanceGroupManagersRequest +from .types.compute import ListManagedInstancesRegionInstanceGroupManagersRequest +from .types.compute import ListNetworkAttachmentsRequest +from .types.compute import ListNetworkEndpointGroupsRequest +from .types.compute import ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest +from .types.compute import ListNetworkEndpointsNetworkEndpointGroupsRequest +from .types.compute import ListNetworkFirewallPoliciesRequest +from .types.compute import ListNetworksRequest +from .types.compute import ListNodeGroupsRequest +from .types.compute import ListNodesNodeGroupsRequest +from .types.compute import ListNodeTemplatesRequest +from .types.compute import ListNodeTypesRequest +from .types.compute import ListPacketMirroringsRequest +from .types.compute import ListPeeringRoutesNetworksRequest +from .types.compute import ListPerInstanceConfigsInstanceGroupManagersRequest +from .types.compute import ListPerInstanceConfigsRegionInstanceGroupManagersRequest +from .types.compute import ListPreconfiguredExpressionSetsSecurityPoliciesRequest +from .types.compute import ListPublicAdvertisedPrefixesRequest +from .types.compute import ListPublicDelegatedPrefixesRequest +from .types.compute import ListReferrersInstancesRequest +from .types.compute import ListRegionAutoscalersRequest +from .types.compute import ListRegionBackendServicesRequest +from .types.compute import ListRegionCommitmentsRequest +from 
.types.compute import ListRegionDisksRequest +from .types.compute import ListRegionDiskTypesRequest +from .types.compute import ListRegionHealthCheckServicesRequest +from .types.compute import ListRegionHealthChecksRequest +from .types.compute import ListRegionInstanceGroupManagersRequest +from .types.compute import ListRegionInstanceGroupsRequest +from .types.compute import ListRegionInstanceTemplatesRequest +from .types.compute import ListRegionNetworkEndpointGroupsRequest +from .types.compute import ListRegionNetworkFirewallPoliciesRequest +from .types.compute import ListRegionNotificationEndpointsRequest +from .types.compute import ListRegionOperationsRequest +from .types.compute import ListRegionSecurityPoliciesRequest +from .types.compute import ListRegionsRequest +from .types.compute import ListRegionSslCertificatesRequest +from .types.compute import ListRegionSslPoliciesRequest +from .types.compute import ListRegionTargetHttpProxiesRequest +from .types.compute import ListRegionTargetHttpsProxiesRequest +from .types.compute import ListRegionTargetTcpProxiesRequest +from .types.compute import ListRegionUrlMapsRequest +from .types.compute import ListReservationsRequest +from .types.compute import ListResourcePoliciesRequest +from .types.compute import ListRoutersRequest +from .types.compute import ListRoutesRequest +from .types.compute import ListSecurityPoliciesRequest +from .types.compute import ListServiceAttachmentsRequest +from .types.compute import ListSnapshotsRequest +from .types.compute import ListSslCertificatesRequest +from .types.compute import ListSslPoliciesRequest +from .types.compute import ListSubnetworksRequest +from .types.compute import ListTargetGrpcProxiesRequest +from .types.compute import ListTargetHttpProxiesRequest +from .types.compute import ListTargetHttpsProxiesRequest +from .types.compute import ListTargetInstancesRequest +from .types.compute import ListTargetPoolsRequest +from .types.compute import ListTargetSslProxiesRequest 
+from .types.compute import ListTargetTcpProxiesRequest +from .types.compute import ListTargetVpnGatewaysRequest +from .types.compute import ListUrlMapsRequest +from .types.compute import ListUsableSubnetworksRequest +from .types.compute import ListVpnGatewaysRequest +from .types.compute import ListVpnTunnelsRequest +from .types.compute import ListXpnHostsProjectsRequest +from .types.compute import ListZoneOperationsRequest +from .types.compute import ListZonesRequest +from .types.compute import LocalDisk +from .types.compute import LocalizedMessage +from .types.compute import LocationPolicy +from .types.compute import LocationPolicyLocation +from .types.compute import LocationPolicyLocationConstraints +from .types.compute import LogConfig +from .types.compute import LogConfigCloudAuditOptions +from .types.compute import LogConfigCounterOptions +from .types.compute import LogConfigCounterOptionsCustomField +from .types.compute import LogConfigDataAccessOptions +from .types.compute import MachineImage +from .types.compute import MachineImageList +from .types.compute import MachineType +from .types.compute import MachineTypeAggregatedList +from .types.compute import MachineTypeList +from .types.compute import MachineTypesScopedList +from .types.compute import ManagedInstance +from .types.compute import ManagedInstanceInstanceHealth +from .types.compute import ManagedInstanceLastAttempt +from .types.compute import ManagedInstanceVersion +from .types.compute import Metadata +from .types.compute import MetadataFilter +from .types.compute import MetadataFilterLabelMatch +from .types.compute import MoveAddressRequest +from .types.compute import MoveDiskProjectRequest +from .types.compute import MoveFirewallPolicyRequest +from .types.compute import MoveGlobalAddressRequest +from .types.compute import MoveInstanceProjectRequest +from .types.compute import NamedPort +from .types.compute import Network +from .types.compute import NetworkAttachment +from .types.compute import 
NetworkAttachmentAggregatedList +from .types.compute import NetworkAttachmentConnectedEndpoint +from .types.compute import NetworkAttachmentList +from .types.compute import NetworkAttachmentsScopedList +from .types.compute import NetworkEdgeSecurityService +from .types.compute import NetworkEdgeSecurityServiceAggregatedList +from .types.compute import NetworkEdgeSecurityServicesScopedList +from .types.compute import NetworkEndpoint +from .types.compute import NetworkEndpointGroup +from .types.compute import NetworkEndpointGroupAggregatedList +from .types.compute import NetworkEndpointGroupAppEngine +from .types.compute import NetworkEndpointGroupCloudFunction +from .types.compute import NetworkEndpointGroupCloudRun +from .types.compute import NetworkEndpointGroupList +from .types.compute import NetworkEndpointGroupPscData +from .types.compute import NetworkEndpointGroupsAttachEndpointsRequest +from .types.compute import NetworkEndpointGroupsDetachEndpointsRequest +from .types.compute import NetworkEndpointGroupsListEndpointsRequest +from .types.compute import NetworkEndpointGroupsListNetworkEndpoints +from .types.compute import NetworkEndpointGroupsScopedList +from .types.compute import NetworkEndpointWithHealthStatus +from .types.compute import NetworkInterface +from .types.compute import NetworkList +from .types.compute import NetworkPeering +from .types.compute import NetworkPerformanceConfig +from .types.compute import NetworkRoutingConfig +from .types.compute import NetworksAddPeeringRequest +from .types.compute import NetworksGetEffectiveFirewallsResponse +from .types.compute import NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy +from .types.compute import NetworksRemovePeeringRequest +from .types.compute import NetworksUpdatePeeringRequest +from .types.compute import NodeGroup +from .types.compute import NodeGroupAggregatedList +from .types.compute import NodeGroupAutoscalingPolicy +from .types.compute import NodeGroupList +from .types.compute 
import NodeGroupMaintenanceWindow +from .types.compute import NodeGroupNode +from .types.compute import NodeGroupsAddNodesRequest +from .types.compute import NodeGroupsDeleteNodesRequest +from .types.compute import NodeGroupsListNodes +from .types.compute import NodeGroupsScopedList +from .types.compute import NodeGroupsSetNodeTemplateRequest +from .types.compute import NodeGroupsSimulateMaintenanceEventRequest +from .types.compute import NodeTemplate +from .types.compute import NodeTemplateAggregatedList +from .types.compute import NodeTemplateList +from .types.compute import NodeTemplateNodeTypeFlexibility +from .types.compute import NodeTemplatesScopedList +from .types.compute import NodeType +from .types.compute import NodeTypeAggregatedList +from .types.compute import NodeTypeList +from .types.compute import NodeTypesScopedList +from .types.compute import NotificationEndpoint +from .types.compute import NotificationEndpointGrpcSettings +from .types.compute import NotificationEndpointList +from .types.compute import Operation +from .types.compute import OperationAggregatedList +from .types.compute import OperationList +from .types.compute import OperationsScopedList +from .types.compute import OutlierDetection +from .types.compute import PacketIntervals +from .types.compute import PacketMirroring +from .types.compute import PacketMirroringAggregatedList +from .types.compute import PacketMirroringFilter +from .types.compute import PacketMirroringForwardingRuleInfo +from .types.compute import PacketMirroringList +from .types.compute import PacketMirroringMirroredResourceInfo +from .types.compute import PacketMirroringMirroredResourceInfoInstanceInfo +from .types.compute import PacketMirroringMirroredResourceInfoSubnetInfo +from .types.compute import PacketMirroringNetworkInfo +from .types.compute import PacketMirroringsScopedList +from .types.compute import PatchAutoscalerRequest +from .types.compute import PatchBackendBucketRequest +from .types.compute import 
PatchBackendServiceRequest +from .types.compute import PatchFirewallPolicyRequest +from .types.compute import PatchFirewallRequest +from .types.compute import PatchForwardingRuleRequest +from .types.compute import PatchGlobalForwardingRuleRequest +from .types.compute import PatchGlobalPublicDelegatedPrefixeRequest +from .types.compute import PatchHealthCheckRequest +from .types.compute import PatchImageRequest +from .types.compute import PatchInstanceGroupManagerRequest +from .types.compute import PatchInterconnectAttachmentRequest +from .types.compute import PatchInterconnectRequest +from .types.compute import PatchNetworkEdgeSecurityServiceRequest +from .types.compute import PatchNetworkFirewallPolicyRequest +from .types.compute import PatchNetworkRequest +from .types.compute import PatchNodeGroupRequest +from .types.compute import PatchPacketMirroringRequest +from .types.compute import PatchPerInstanceConfigsInstanceGroupManagerRequest +from .types.compute import PatchPerInstanceConfigsRegionInstanceGroupManagerRequest +from .types.compute import PatchPublicAdvertisedPrefixeRequest +from .types.compute import PatchPublicDelegatedPrefixeRequest +from .types.compute import PatchRegionAutoscalerRequest +from .types.compute import PatchRegionBackendServiceRequest +from .types.compute import PatchRegionHealthCheckRequest +from .types.compute import PatchRegionHealthCheckServiceRequest +from .types.compute import PatchRegionInstanceGroupManagerRequest +from .types.compute import PatchRegionNetworkFirewallPolicyRequest +from .types.compute import PatchRegionSecurityPolicyRequest +from .types.compute import PatchRegionSslPolicyRequest +from .types.compute import PatchRegionTargetHttpsProxyRequest +from .types.compute import PatchRegionUrlMapRequest +from .types.compute import PatchResourcePolicyRequest +from .types.compute import PatchRouterRequest +from .types.compute import PatchRuleFirewallPolicyRequest +from .types.compute import 
PatchRuleNetworkFirewallPolicyRequest +from .types.compute import PatchRuleRegionNetworkFirewallPolicyRequest +from .types.compute import PatchRuleSecurityPolicyRequest +from .types.compute import PatchSecurityPolicyRequest +from .types.compute import PatchServiceAttachmentRequest +from .types.compute import PatchSslPolicyRequest +from .types.compute import PatchSubnetworkRequest +from .types.compute import PatchTargetGrpcProxyRequest +from .types.compute import PatchTargetHttpProxyRequest +from .types.compute import PatchTargetHttpsProxyRequest +from .types.compute import PatchUrlMapRequest +from .types.compute import PathMatcher +from .types.compute import PathRule +from .types.compute import PerInstanceConfig +from .types.compute import Policy +from .types.compute import PreconfiguredWafSet +from .types.compute import PreservedState +from .types.compute import PreservedStatePreservedDisk +from .types.compute import PreviewRouterRequest +from .types.compute import Project +from .types.compute import ProjectsDisableXpnResourceRequest +from .types.compute import ProjectsEnableXpnResourceRequest +from .types.compute import ProjectsGetXpnResources +from .types.compute import ProjectsListXpnHostsRequest +from .types.compute import ProjectsSetDefaultNetworkTierRequest +from .types.compute import PublicAdvertisedPrefix +from .types.compute import PublicAdvertisedPrefixList +from .types.compute import PublicAdvertisedPrefixPublicDelegatedPrefix +from .types.compute import PublicDelegatedPrefix +from .types.compute import PublicDelegatedPrefixAggregatedList +from .types.compute import PublicDelegatedPrefixesScopedList +from .types.compute import PublicDelegatedPrefixList +from .types.compute import PublicDelegatedPrefixPublicDelegatedSubPrefix +from .types.compute import Quota +from .types.compute import QuotaExceededInfo +from .types.compute import RawDisk +from .types.compute import RecreateInstancesInstanceGroupManagerRequest +from .types.compute import 
RecreateInstancesRegionInstanceGroupManagerRequest +from .types.compute import Reference +from .types.compute import Region +from .types.compute import RegionAddressesMoveRequest +from .types.compute import RegionAutoscalerList +from .types.compute import RegionDisksAddResourcePoliciesRequest +from .types.compute import RegionDisksRemoveResourcePoliciesRequest +from .types.compute import RegionDisksResizeRequest +from .types.compute import RegionDisksStartAsyncReplicationRequest +from .types.compute import RegionDiskTypeList +from .types.compute import RegionInstanceGroupList +from .types.compute import RegionInstanceGroupManagerDeleteInstanceConfigReq +from .types.compute import RegionInstanceGroupManagerList +from .types.compute import RegionInstanceGroupManagerPatchInstanceConfigReq +from .types.compute import RegionInstanceGroupManagersAbandonInstancesRequest +from .types.compute import RegionInstanceGroupManagersApplyUpdatesRequest +from .types.compute import RegionInstanceGroupManagersCreateInstancesRequest +from .types.compute import RegionInstanceGroupManagersDeleteInstancesRequest +from .types.compute import RegionInstanceGroupManagersListErrorsResponse +from .types.compute import RegionInstanceGroupManagersListInstanceConfigsResp +from .types.compute import RegionInstanceGroupManagersListInstancesResponse +from .types.compute import RegionInstanceGroupManagersRecreateRequest +from .types.compute import RegionInstanceGroupManagersSetTargetPoolsRequest +from .types.compute import RegionInstanceGroupManagersSetTemplateRequest +from .types.compute import RegionInstanceGroupManagerUpdateInstanceConfigReq +from .types.compute import RegionInstanceGroupsListInstances +from .types.compute import RegionInstanceGroupsListInstancesRequest +from .types.compute import RegionInstanceGroupsSetNamedPortsRequest +from .types.compute import RegionList +from .types.compute import RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse +from .types.compute import 
RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy +from .types.compute import RegionSetLabelsRequest +from .types.compute import RegionSetPolicyRequest +from .types.compute import RegionTargetHttpsProxiesSetSslCertificatesRequest +from .types.compute import RegionUrlMapsValidateRequest +from .types.compute import RemoveAssociationFirewallPolicyRequest +from .types.compute import RemoveAssociationNetworkFirewallPolicyRequest +from .types.compute import RemoveAssociationRegionNetworkFirewallPolicyRequest +from .types.compute import RemoveHealthCheckTargetPoolRequest +from .types.compute import RemoveInstancesInstanceGroupRequest +from .types.compute import RemoveInstanceTargetPoolRequest +from .types.compute import RemovePeeringNetworkRequest +from .types.compute import RemoveResourcePoliciesDiskRequest +from .types.compute import RemoveResourcePoliciesInstanceRequest +from .types.compute import RemoveResourcePoliciesRegionDiskRequest +from .types.compute import RemoveRuleFirewallPolicyRequest +from .types.compute import RemoveRuleNetworkFirewallPolicyRequest +from .types.compute import RemoveRuleRegionNetworkFirewallPolicyRequest +from .types.compute import RemoveRuleSecurityPolicyRequest +from .types.compute import RequestMirrorPolicy +from .types.compute import Reservation +from .types.compute import ReservationAffinity +from .types.compute import ReservationAggregatedList +from .types.compute import ReservationList +from .types.compute import ReservationsResizeRequest +from .types.compute import ReservationsScopedList +from .types.compute import ResetInstanceRequest +from .types.compute import ResizeDiskRequest +from .types.compute import ResizeInstanceGroupManagerRequest +from .types.compute import ResizeRegionDiskRequest +from .types.compute import ResizeRegionInstanceGroupManagerRequest +from .types.compute import ResizeReservationRequest +from .types.compute import ResourceCommitment +from .types.compute import 
ResourceGroupReference +from .types.compute import ResourcePoliciesScopedList +from .types.compute import ResourcePolicy +from .types.compute import ResourcePolicyAggregatedList +from .types.compute import ResourcePolicyDailyCycle +from .types.compute import ResourcePolicyDiskConsistencyGroupPolicy +from .types.compute import ResourcePolicyGroupPlacementPolicy +from .types.compute import ResourcePolicyHourlyCycle +from .types.compute import ResourcePolicyInstanceSchedulePolicy +from .types.compute import ResourcePolicyInstanceSchedulePolicySchedule +from .types.compute import ResourcePolicyList +from .types.compute import ResourcePolicyResourceStatus +from .types.compute import ResourcePolicyResourceStatusInstanceSchedulePolicyStatus +from .types.compute import ResourcePolicySnapshotSchedulePolicy +from .types.compute import ResourcePolicySnapshotSchedulePolicyRetentionPolicy +from .types.compute import ResourcePolicySnapshotSchedulePolicySchedule +from .types.compute import ResourcePolicySnapshotSchedulePolicySnapshotProperties +from .types.compute import ResourcePolicyWeeklyCycle +from .types.compute import ResourcePolicyWeeklyCycleDayOfWeek +from .types.compute import ResourceStatus +from .types.compute import ResumeInstanceRequest +from .types.compute import Route +from .types.compute import RouteAsPath +from .types.compute import RouteList +from .types.compute import Router +from .types.compute import RouterAdvertisedIpRange +from .types.compute import RouterAggregatedList +from .types.compute import RouterBgp +from .types.compute import RouterBgpPeer +from .types.compute import RouterBgpPeerBfd +from .types.compute import RouterBgpPeerCustomLearnedIpRange +from .types.compute import RouterInterface +from .types.compute import RouterList +from .types.compute import RouterMd5AuthenticationKey +from .types.compute import RouterNat +from .types.compute import RouterNatLogConfig +from .types.compute import RouterNatRule +from .types.compute import 
RouterNatRuleAction +from .types.compute import RouterNatSubnetworkToNat +from .types.compute import RoutersPreviewResponse +from .types.compute import RoutersScopedList +from .types.compute import RouterStatus +from .types.compute import RouterStatusBgpPeerStatus +from .types.compute import RouterStatusNatStatus +from .types.compute import RouterStatusNatStatusNatRuleStatus +from .types.compute import RouterStatusResponse +from .types.compute import Rule +from .types.compute import SavedAttachedDisk +from .types.compute import SavedDisk +from .types.compute import ScalingScheduleStatus +from .types.compute import Scheduling +from .types.compute import SchedulingNodeAffinity +from .types.compute import ScratchDisks +from .types.compute import Screenshot +from .types.compute import SecurityPoliciesAggregatedList +from .types.compute import SecurityPoliciesListPreconfiguredExpressionSetsResponse +from .types.compute import SecurityPoliciesScopedList +from .types.compute import SecurityPoliciesWafConfig +from .types.compute import SecurityPolicy +from .types.compute import SecurityPolicyAdaptiveProtectionConfig +from .types.compute import SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig +from .types.compute import SecurityPolicyAdvancedOptionsConfig +from .types.compute import SecurityPolicyAdvancedOptionsConfigJsonCustomConfig +from .types.compute import SecurityPolicyDdosProtectionConfig +from .types.compute import SecurityPolicyList +from .types.compute import SecurityPolicyRecaptchaOptionsConfig +from .types.compute import SecurityPolicyReference +from .types.compute import SecurityPolicyRule +from .types.compute import SecurityPolicyRuleHttpHeaderAction +from .types.compute import SecurityPolicyRuleHttpHeaderActionHttpHeaderOption +from .types.compute import SecurityPolicyRuleMatcher +from .types.compute import SecurityPolicyRuleMatcherConfig +from .types.compute import SecurityPolicyRulePreconfiguredWafConfig +from .types.compute import 
SecurityPolicyRulePreconfiguredWafConfigExclusion +from .types.compute import SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams +from .types.compute import SecurityPolicyRuleRateLimitOptions +from .types.compute import SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig +from .types.compute import SecurityPolicyRuleRateLimitOptionsThreshold +from .types.compute import SecurityPolicyRuleRedirectOptions +from .types.compute import SecuritySettings +from .types.compute import SendDiagnosticInterruptInstanceRequest +from .types.compute import SendDiagnosticInterruptInstanceResponse +from .types.compute import SerialPortOutput +from .types.compute import ServerBinding +from .types.compute import ServiceAccount +from .types.compute import ServiceAttachment +from .types.compute import ServiceAttachmentAggregatedList +from .types.compute import ServiceAttachmentConnectedEndpoint +from .types.compute import ServiceAttachmentConsumerProjectLimit +from .types.compute import ServiceAttachmentList +from .types.compute import ServiceAttachmentsScopedList +from .types.compute import SetBackendServiceTargetSslProxyRequest +from .types.compute import SetBackendServiceTargetTcpProxyRequest +from .types.compute import SetBackupTargetPoolRequest +from .types.compute import SetCertificateMapTargetHttpsProxyRequest +from .types.compute import SetCertificateMapTargetSslProxyRequest +from .types.compute import SetCommonInstanceMetadataProjectRequest +from .types.compute import SetDefaultNetworkTierProjectRequest +from .types.compute import SetDeletionProtectionInstanceRequest +from .types.compute import SetDiskAutoDeleteInstanceRequest +from .types.compute import SetEdgeSecurityPolicyBackendBucketRequest +from .types.compute import SetEdgeSecurityPolicyBackendServiceRequest +from .types.compute import SetIamPolicyBackendServiceRequest +from .types.compute import SetIamPolicyDiskRequest +from .types.compute import SetIamPolicyFirewallPolicyRequest +from .types.compute import 
SetIamPolicyImageRequest +from .types.compute import SetIamPolicyInstanceRequest +from .types.compute import SetIamPolicyInstanceTemplateRequest +from .types.compute import SetIamPolicyLicenseRequest +from .types.compute import SetIamPolicyMachineImageRequest +from .types.compute import SetIamPolicyNetworkAttachmentRequest +from .types.compute import SetIamPolicyNetworkFirewallPolicyRequest +from .types.compute import SetIamPolicyNodeGroupRequest +from .types.compute import SetIamPolicyNodeTemplateRequest +from .types.compute import SetIamPolicyRegionBackendServiceRequest +from .types.compute import SetIamPolicyRegionDiskRequest +from .types.compute import SetIamPolicyRegionNetworkFirewallPolicyRequest +from .types.compute import SetIamPolicyReservationRequest +from .types.compute import SetIamPolicyResourcePolicyRequest +from .types.compute import SetIamPolicyServiceAttachmentRequest +from .types.compute import SetIamPolicySnapshotRequest +from .types.compute import SetIamPolicySubnetworkRequest +from .types.compute import SetInstanceTemplateInstanceGroupManagerRequest +from .types.compute import SetInstanceTemplateRegionInstanceGroupManagerRequest +from .types.compute import SetLabelsAddressRequest +from .types.compute import SetLabelsDiskRequest +from .types.compute import SetLabelsExternalVpnGatewayRequest +from .types.compute import SetLabelsForwardingRuleRequest +from .types.compute import SetLabelsGlobalAddressRequest +from .types.compute import SetLabelsGlobalForwardingRuleRequest +from .types.compute import SetLabelsImageRequest +from .types.compute import SetLabelsInstanceRequest +from .types.compute import SetLabelsInterconnectAttachmentRequest +from .types.compute import SetLabelsInterconnectRequest +from .types.compute import SetLabelsRegionDiskRequest +from .types.compute import SetLabelsSecurityPolicyRequest +from .types.compute import SetLabelsSnapshotRequest +from .types.compute import SetLabelsTargetVpnGatewayRequest +from .types.compute import 
SetLabelsVpnGatewayRequest +from .types.compute import SetLabelsVpnTunnelRequest +from .types.compute import SetMachineResourcesInstanceRequest +from .types.compute import SetMachineTypeInstanceRequest +from .types.compute import SetMetadataInstanceRequest +from .types.compute import SetMinCpuPlatformInstanceRequest +from .types.compute import SetNamedPortsInstanceGroupRequest +from .types.compute import SetNamedPortsRegionInstanceGroupRequest +from .types.compute import SetNameInstanceRequest +from .types.compute import SetNodeTemplateNodeGroupRequest +from .types.compute import SetPrivateIpGoogleAccessSubnetworkRequest +from .types.compute import SetProxyHeaderTargetSslProxyRequest +from .types.compute import SetProxyHeaderTargetTcpProxyRequest +from .types.compute import SetQuicOverrideTargetHttpsProxyRequest +from .types.compute import SetSchedulingInstanceRequest +from .types.compute import SetSecurityPolicyBackendServiceRequest +from .types.compute import SetServiceAccountInstanceRequest +from .types.compute import SetShieldedInstanceIntegrityPolicyInstanceRequest +from .types.compute import SetSslCertificatesRegionTargetHttpsProxyRequest +from .types.compute import SetSslCertificatesTargetHttpsProxyRequest +from .types.compute import SetSslCertificatesTargetSslProxyRequest +from .types.compute import SetSslPolicyTargetHttpsProxyRequest +from .types.compute import SetSslPolicyTargetSslProxyRequest +from .types.compute import SetTagsInstanceRequest +from .types.compute import SetTargetForwardingRuleRequest +from .types.compute import SetTargetGlobalForwardingRuleRequest +from .types.compute import SetTargetPoolsInstanceGroupManagerRequest +from .types.compute import SetTargetPoolsRegionInstanceGroupManagerRequest +from .types.compute import SetUrlMapRegionTargetHttpProxyRequest +from .types.compute import SetUrlMapRegionTargetHttpsProxyRequest +from .types.compute import SetUrlMapTargetHttpProxyRequest +from .types.compute import 
SetUrlMapTargetHttpsProxyRequest +from .types.compute import SetUsageExportBucketProjectRequest +from .types.compute import ShareSettings +from .types.compute import ShareSettingsProjectConfig +from .types.compute import ShieldedInstanceConfig +from .types.compute import ShieldedInstanceIdentity +from .types.compute import ShieldedInstanceIdentityEntry +from .types.compute import ShieldedInstanceIntegrityPolicy +from .types.compute import SignedUrlKey +from .types.compute import SimulateMaintenanceEventInstanceRequest +from .types.compute import SimulateMaintenanceEventNodeGroupRequest +from .types.compute import Snapshot +from .types.compute import SnapshotList +from .types.compute import SourceDiskEncryptionKey +from .types.compute import SourceInstanceParams +from .types.compute import SourceInstanceProperties +from .types.compute import SslCertificate +from .types.compute import SslCertificateAggregatedList +from .types.compute import SslCertificateList +from .types.compute import SslCertificateManagedSslCertificate +from .types.compute import SslCertificateSelfManagedSslCertificate +from .types.compute import SslCertificatesScopedList +from .types.compute import SSLHealthCheck +from .types.compute import SslPoliciesAggregatedList +from .types.compute import SslPoliciesList +from .types.compute import SslPoliciesListAvailableFeaturesResponse +from .types.compute import SslPoliciesScopedList +from .types.compute import SslPolicy +from .types.compute import SslPolicyReference +from .types.compute import StartAsyncReplicationDiskRequest +from .types.compute import StartAsyncReplicationRegionDiskRequest +from .types.compute import StartInstanceRequest +from .types.compute import StartWithEncryptionKeyInstanceRequest +from .types.compute import StatefulPolicy +from .types.compute import StatefulPolicyPreservedState +from .types.compute import StatefulPolicyPreservedStateDiskDevice +from .types.compute import StopAsyncReplicationDiskRequest +from .types.compute 
import StopAsyncReplicationRegionDiskRequest +from .types.compute import StopGroupAsyncReplicationDiskRequest +from .types.compute import StopGroupAsyncReplicationRegionDiskRequest +from .types.compute import StopInstanceRequest +from .types.compute import Subnetwork +from .types.compute import SubnetworkAggregatedList +from .types.compute import SubnetworkList +from .types.compute import SubnetworkLogConfig +from .types.compute import SubnetworkSecondaryRange +from .types.compute import SubnetworksExpandIpCidrRangeRequest +from .types.compute import SubnetworksScopedList +from .types.compute import SubnetworksSetPrivateIpGoogleAccessRequest +from .types.compute import Subsetting +from .types.compute import SuspendInstanceRequest +from .types.compute import SwitchToCustomModeNetworkRequest +from .types.compute import Tags +from .types.compute import TargetGrpcProxy +from .types.compute import TargetGrpcProxyList +from .types.compute import TargetHttpProxiesScopedList +from .types.compute import TargetHttpProxy +from .types.compute import TargetHttpProxyAggregatedList +from .types.compute import TargetHttpProxyList +from .types.compute import TargetHttpsProxiesScopedList +from .types.compute import TargetHttpsProxiesSetCertificateMapRequest +from .types.compute import TargetHttpsProxiesSetQuicOverrideRequest +from .types.compute import TargetHttpsProxiesSetSslCertificatesRequest +from .types.compute import TargetHttpsProxy +from .types.compute import TargetHttpsProxyAggregatedList +from .types.compute import TargetHttpsProxyList +from .types.compute import TargetInstance +from .types.compute import TargetInstanceAggregatedList +from .types.compute import TargetInstanceList +from .types.compute import TargetInstancesScopedList +from .types.compute import TargetPool +from .types.compute import TargetPoolAggregatedList +from .types.compute import TargetPoolInstanceHealth +from .types.compute import TargetPoolList +from .types.compute import 
TargetPoolsAddHealthCheckRequest +from .types.compute import TargetPoolsAddInstanceRequest +from .types.compute import TargetPoolsRemoveHealthCheckRequest +from .types.compute import TargetPoolsRemoveInstanceRequest +from .types.compute import TargetPoolsScopedList +from .types.compute import TargetReference +from .types.compute import TargetSslProxiesSetBackendServiceRequest +from .types.compute import TargetSslProxiesSetCertificateMapRequest +from .types.compute import TargetSslProxiesSetProxyHeaderRequest +from .types.compute import TargetSslProxiesSetSslCertificatesRequest +from .types.compute import TargetSslProxy +from .types.compute import TargetSslProxyList +from .types.compute import TargetTcpProxiesScopedList +from .types.compute import TargetTcpProxiesSetBackendServiceRequest +from .types.compute import TargetTcpProxiesSetProxyHeaderRequest +from .types.compute import TargetTcpProxy +from .types.compute import TargetTcpProxyAggregatedList +from .types.compute import TargetTcpProxyList +from .types.compute import TargetVpnGateway +from .types.compute import TargetVpnGatewayAggregatedList +from .types.compute import TargetVpnGatewayList +from .types.compute import TargetVpnGatewaysScopedList +from .types.compute import TCPHealthCheck +from .types.compute import TestFailure +from .types.compute import TestIamPermissionsDiskRequest +from .types.compute import TestIamPermissionsExternalVpnGatewayRequest +from .types.compute import TestIamPermissionsFirewallPolicyRequest +from .types.compute import TestIamPermissionsImageRequest +from .types.compute import TestIamPermissionsInstanceRequest +from .types.compute import TestIamPermissionsInstanceTemplateRequest +from .types.compute import TestIamPermissionsLicenseCodeRequest +from .types.compute import TestIamPermissionsLicenseRequest +from .types.compute import TestIamPermissionsMachineImageRequest +from .types.compute import TestIamPermissionsNetworkAttachmentRequest +from .types.compute import 
TestIamPermissionsNetworkEndpointGroupRequest +from .types.compute import TestIamPermissionsNetworkFirewallPolicyRequest +from .types.compute import TestIamPermissionsNodeGroupRequest +from .types.compute import TestIamPermissionsNodeTemplateRequest +from .types.compute import TestIamPermissionsPacketMirroringRequest +from .types.compute import TestIamPermissionsRegionDiskRequest +from .types.compute import TestIamPermissionsRegionNetworkFirewallPolicyRequest +from .types.compute import TestIamPermissionsReservationRequest +from .types.compute import TestIamPermissionsResourcePolicyRequest +from .types.compute import TestIamPermissionsServiceAttachmentRequest +from .types.compute import TestIamPermissionsSnapshotRequest +from .types.compute import TestIamPermissionsSubnetworkRequest +from .types.compute import TestIamPermissionsVpnGatewayRequest +from .types.compute import TestPermissionsRequest +from .types.compute import TestPermissionsResponse +from .types.compute import Uint128 +from .types.compute import UpdateAccessConfigInstanceRequest +from .types.compute import UpdateAutoscalerRequest +from .types.compute import UpdateBackendBucketRequest +from .types.compute import UpdateBackendServiceRequest +from .types.compute import UpdateDiskRequest +from .types.compute import UpdateDisplayDeviceInstanceRequest +from .types.compute import UpdateFirewallRequest +from .types.compute import UpdateHealthCheckRequest +from .types.compute import UpdateInstanceRequest +from .types.compute import UpdateNetworkInterfaceInstanceRequest +from .types.compute import UpdatePeeringNetworkRequest +from .types.compute import UpdatePerInstanceConfigsInstanceGroupManagerRequest +from .types.compute import UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest +from .types.compute import UpdateRegionAutoscalerRequest +from .types.compute import UpdateRegionBackendServiceRequest +from .types.compute import UpdateRegionCommitmentRequest +from .types.compute import 
UpdateRegionDiskRequest +from .types.compute import UpdateRegionHealthCheckRequest +from .types.compute import UpdateRegionUrlMapRequest +from .types.compute import UpdateReservationRequest +from .types.compute import UpdateRouterRequest +from .types.compute import UpdateShieldedInstanceConfigInstanceRequest +from .types.compute import UpdateUrlMapRequest +from .types.compute import UrlMap +from .types.compute import UrlMapList +from .types.compute import UrlMapReference +from .types.compute import UrlMapsAggregatedList +from .types.compute import UrlMapsScopedList +from .types.compute import UrlMapsValidateRequest +from .types.compute import UrlMapsValidateResponse +from .types.compute import UrlMapTest +from .types.compute import UrlMapTestHeader +from .types.compute import UrlMapValidationResult +from .types.compute import UrlRewrite +from .types.compute import UsableSubnetwork +from .types.compute import UsableSubnetworksAggregatedList +from .types.compute import UsableSubnetworkSecondaryRange +from .types.compute import UsageExportLocation +from .types.compute import ValidateRegionUrlMapRequest +from .types.compute import ValidateUrlMapRequest +from .types.compute import VmEndpointNatMappings +from .types.compute import VmEndpointNatMappingsInterfaceNatMappings +from .types.compute import VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings +from .types.compute import VmEndpointNatMappingsList +from .types.compute import VpnGateway +from .types.compute import VpnGatewayAggregatedList +from .types.compute import VpnGatewayList +from .types.compute import VpnGatewaysGetStatusResponse +from .types.compute import VpnGatewaysScopedList +from .types.compute import VpnGatewayStatus +from .types.compute import VpnGatewayStatusHighAvailabilityRequirementState +from .types.compute import VpnGatewayStatusTunnel +from .types.compute import VpnGatewayStatusVpnConnection +from .types.compute import VpnGatewayVpnGatewayInterface +from .types.compute import VpnTunnel 
+from .types.compute import VpnTunnelAggregatedList +from .types.compute import VpnTunnelList +from .types.compute import VpnTunnelsScopedList +from .types.compute import WafExpressionSet +from .types.compute import WafExpressionSetExpression +from .types.compute import WaitGlobalOperationRequest +from .types.compute import WaitRegionOperationRequest +from .types.compute import WaitZoneOperationRequest +from .types.compute import Warning +from .types.compute import Warnings +from .types.compute import WeightedBackendService +from .types.compute import XpnHostList +from .types.compute import XpnResourceId +from .types.compute import Zone +from .types.compute import ZoneList +from .types.compute import ZoneSetLabelsRequest +from .types.compute import ZoneSetPolicyRequest + +__all__ = ( +'AbandonInstancesInstanceGroupManagerRequest', +'AbandonInstancesRegionInstanceGroupManagerRequest', +'AcceleratorConfig', +'AcceleratorType', +'AcceleratorTypeAggregatedList', +'AcceleratorTypeList', +'AcceleratorTypesClient', +'AcceleratorTypesScopedList', +'Accelerators', +'AccessConfig', +'AddAccessConfigInstanceRequest', +'AddAssociationFirewallPolicyRequest', +'AddAssociationNetworkFirewallPolicyRequest', +'AddAssociationRegionNetworkFirewallPolicyRequest', +'AddHealthCheckTargetPoolRequest', +'AddInstanceTargetPoolRequest', +'AddInstancesInstanceGroupRequest', +'AddNodesNodeGroupRequest', +'AddPeeringNetworkRequest', +'AddResourcePoliciesDiskRequest', +'AddResourcePoliciesInstanceRequest', +'AddResourcePoliciesRegionDiskRequest', +'AddRuleFirewallPolicyRequest', +'AddRuleNetworkFirewallPolicyRequest', +'AddRuleRegionNetworkFirewallPolicyRequest', +'AddRuleSecurityPolicyRequest', +'AddSignedUrlKeyBackendBucketRequest', +'AddSignedUrlKeyBackendServiceRequest', +'Address', +'AddressAggregatedList', +'AddressList', +'AddressesClient', +'AddressesScopedList', +'AdvancedMachineFeatures', +'AggregatedListAcceleratorTypesRequest', +'AggregatedListAddressesRequest', 
+'AggregatedListAutoscalersRequest', +'AggregatedListBackendServicesRequest', +'AggregatedListDiskTypesRequest', +'AggregatedListDisksRequest', +'AggregatedListForwardingRulesRequest', +'AggregatedListGlobalOperationsRequest', +'AggregatedListHealthChecksRequest', +'AggregatedListInstanceGroupManagersRequest', +'AggregatedListInstanceGroupsRequest', +'AggregatedListInstanceTemplatesRequest', +'AggregatedListInstancesRequest', +'AggregatedListInterconnectAttachmentsRequest', +'AggregatedListMachineTypesRequest', +'AggregatedListNetworkAttachmentsRequest', +'AggregatedListNetworkEdgeSecurityServicesRequest', +'AggregatedListNetworkEndpointGroupsRequest', +'AggregatedListNodeGroupsRequest', +'AggregatedListNodeTemplatesRequest', +'AggregatedListNodeTypesRequest', +'AggregatedListPacketMirroringsRequest', +'AggregatedListPublicDelegatedPrefixesRequest', +'AggregatedListRegionCommitmentsRequest', +'AggregatedListReservationsRequest', +'AggregatedListResourcePoliciesRequest', +'AggregatedListRoutersRequest', +'AggregatedListSecurityPoliciesRequest', +'AggregatedListServiceAttachmentsRequest', +'AggregatedListSslCertificatesRequest', +'AggregatedListSslPoliciesRequest', +'AggregatedListSubnetworksRequest', +'AggregatedListTargetHttpProxiesRequest', +'AggregatedListTargetHttpsProxiesRequest', +'AggregatedListTargetInstancesRequest', +'AggregatedListTargetPoolsRequest', +'AggregatedListTargetTcpProxiesRequest', +'AggregatedListTargetVpnGatewaysRequest', +'AggregatedListUrlMapsRequest', +'AggregatedListVpnGatewaysRequest', +'AggregatedListVpnTunnelsRequest', +'AliasIpRange', +'AllocationResourceStatus', +'AllocationResourceStatusSpecificSKUAllocation', +'AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk', +'AllocationSpecificSKUAllocationReservedInstanceProperties', +'AllocationSpecificSKUReservation', +'Allowed', +'ApplyUpdatesToInstancesInstanceGroupManagerRequest', +'ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest', 
+'AttachDiskInstanceRequest', +'AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest', +'AttachNetworkEndpointsNetworkEndpointGroupRequest', +'AttachedDisk', +'AttachedDiskInitializeParams', +'AuditConfig', +'AuditLogConfig', +'AuthorizationLoggingOptions', +'Autoscaler', +'AutoscalerAggregatedList', +'AutoscalerList', +'AutoscalerStatusDetails', +'AutoscalersClient', +'AutoscalersScopedList', +'AutoscalingPolicy', +'AutoscalingPolicyCpuUtilization', +'AutoscalingPolicyCustomMetricUtilization', +'AutoscalingPolicyLoadBalancingUtilization', +'AutoscalingPolicyScaleInControl', +'AutoscalingPolicyScalingSchedule', +'Backend', +'BackendBucket', +'BackendBucketCdnPolicy', +'BackendBucketCdnPolicyBypassCacheOnRequestHeader', +'BackendBucketCdnPolicyCacheKeyPolicy', +'BackendBucketCdnPolicyNegativeCachingPolicy', +'BackendBucketList', +'BackendBucketsClient', +'BackendService', +'BackendServiceAggregatedList', +'BackendServiceCdnPolicy', +'BackendServiceCdnPolicyBypassCacheOnRequestHeader', +'BackendServiceCdnPolicyNegativeCachingPolicy', +'BackendServiceConnectionTrackingPolicy', +'BackendServiceFailoverPolicy', +'BackendServiceGroupHealth', +'BackendServiceIAP', +'BackendServiceList', +'BackendServiceLocalityLoadBalancingPolicyConfig', +'BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy', +'BackendServiceLocalityLoadBalancingPolicyConfigPolicy', +'BackendServiceLogConfig', +'BackendServiceReference', +'BackendServicesClient', +'BackendServicesScopedList', +'BfdPacket', +'BfdStatus', +'BfdStatusPacketCounts', +'Binding', +'BulkInsertDiskRequest', +'BulkInsertDiskResource', +'BulkInsertInstanceRequest', +'BulkInsertInstanceResource', +'BulkInsertInstanceResourcePerInstanceProperties', +'BulkInsertRegionDiskRequest', +'BulkInsertRegionInstanceRequest', +'CacheInvalidationRule', +'CacheKeyPolicy', +'CircuitBreakers', +'CloneRulesFirewallPolicyRequest', +'CloneRulesNetworkFirewallPolicyRequest', +'CloneRulesRegionNetworkFirewallPolicyRequest', +'Commitment', 
+'CommitmentAggregatedList', +'CommitmentList', +'CommitmentsScopedList', +'Condition', +'ConfidentialInstanceConfig', +'ConnectionDraining', +'ConsistentHashLoadBalancerSettings', +'ConsistentHashLoadBalancerSettingsHttpCookie', +'CorsPolicy', +'CreateInstancesInstanceGroupManagerRequest', +'CreateInstancesRegionInstanceGroupManagerRequest', +'CreateSnapshotDiskRequest', +'CreateSnapshotRegionDiskRequest', +'CustomerEncryptionKey', +'CustomerEncryptionKeyProtectedDisk', +'Data', +'DeleteAccessConfigInstanceRequest', +'DeleteAddressRequest', +'DeleteAutoscalerRequest', +'DeleteBackendBucketRequest', +'DeleteBackendServiceRequest', +'DeleteDiskRequest', +'DeleteExternalVpnGatewayRequest', +'DeleteFirewallPolicyRequest', +'DeleteFirewallRequest', +'DeleteForwardingRuleRequest', +'DeleteGlobalAddressRequest', +'DeleteGlobalForwardingRuleRequest', +'DeleteGlobalNetworkEndpointGroupRequest', +'DeleteGlobalOperationRequest', +'DeleteGlobalOperationResponse', +'DeleteGlobalOrganizationOperationRequest', +'DeleteGlobalOrganizationOperationResponse', +'DeleteGlobalPublicDelegatedPrefixeRequest', +'DeleteHealthCheckRequest', +'DeleteImageRequest', +'DeleteInstanceGroupManagerRequest', +'DeleteInstanceGroupRequest', +'DeleteInstanceRequest', +'DeleteInstanceTemplateRequest', +'DeleteInstancesInstanceGroupManagerRequest', +'DeleteInstancesRegionInstanceGroupManagerRequest', +'DeleteInterconnectAttachmentRequest', +'DeleteInterconnectRequest', +'DeleteLicenseRequest', +'DeleteMachineImageRequest', +'DeleteNetworkAttachmentRequest', +'DeleteNetworkEdgeSecurityServiceRequest', +'DeleteNetworkEndpointGroupRequest', +'DeleteNetworkFirewallPolicyRequest', +'DeleteNetworkRequest', +'DeleteNodeGroupRequest', +'DeleteNodeTemplateRequest', +'DeleteNodesNodeGroupRequest', +'DeletePacketMirroringRequest', +'DeletePerInstanceConfigsInstanceGroupManagerRequest', +'DeletePerInstanceConfigsRegionInstanceGroupManagerRequest', +'DeletePublicAdvertisedPrefixeRequest', 
+'DeletePublicDelegatedPrefixeRequest', +'DeleteRegionAutoscalerRequest', +'DeleteRegionBackendServiceRequest', +'DeleteRegionDiskRequest', +'DeleteRegionHealthCheckRequest', +'DeleteRegionHealthCheckServiceRequest', +'DeleteRegionInstanceGroupManagerRequest', +'DeleteRegionInstanceTemplateRequest', +'DeleteRegionNetworkEndpointGroupRequest', +'DeleteRegionNetworkFirewallPolicyRequest', +'DeleteRegionNotificationEndpointRequest', +'DeleteRegionOperationRequest', +'DeleteRegionOperationResponse', +'DeleteRegionSecurityPolicyRequest', +'DeleteRegionSslCertificateRequest', +'DeleteRegionSslPolicyRequest', +'DeleteRegionTargetHttpProxyRequest', +'DeleteRegionTargetHttpsProxyRequest', +'DeleteRegionTargetTcpProxyRequest', +'DeleteRegionUrlMapRequest', +'DeleteReservationRequest', +'DeleteResourcePolicyRequest', +'DeleteRouteRequest', +'DeleteRouterRequest', +'DeleteSecurityPolicyRequest', +'DeleteServiceAttachmentRequest', +'DeleteSignedUrlKeyBackendBucketRequest', +'DeleteSignedUrlKeyBackendServiceRequest', +'DeleteSnapshotRequest', +'DeleteSslCertificateRequest', +'DeleteSslPolicyRequest', +'DeleteSubnetworkRequest', +'DeleteTargetGrpcProxyRequest', +'DeleteTargetHttpProxyRequest', +'DeleteTargetHttpsProxyRequest', +'DeleteTargetInstanceRequest', +'DeleteTargetPoolRequest', +'DeleteTargetSslProxyRequest', +'DeleteTargetTcpProxyRequest', +'DeleteTargetVpnGatewayRequest', +'DeleteUrlMapRequest', +'DeleteVpnGatewayRequest', +'DeleteVpnTunnelRequest', +'DeleteZoneOperationRequest', +'DeleteZoneOperationResponse', +'Denied', +'DeprecateImageRequest', +'DeprecationStatus', +'DetachDiskInstanceRequest', +'DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest', +'DetachNetworkEndpointsNetworkEndpointGroupRequest', +'DisableXpnHostProjectRequest', +'DisableXpnResourceProjectRequest', +'Disk', +'DiskAggregatedList', +'DiskAsyncReplication', +'DiskAsyncReplicationList', +'DiskInstantiationConfig', +'DiskList', +'DiskMoveRequest', +'DiskParams', +'DiskResourceStatus', 
+'DiskResourceStatusAsyncReplicationStatus', +'DiskType', +'DiskTypeAggregatedList', +'DiskTypeList', +'DiskTypesClient', +'DiskTypesScopedList', +'DisksAddResourcePoliciesRequest', +'DisksClient', +'DisksRemoveResourcePoliciesRequest', +'DisksResizeRequest', +'DisksScopedList', +'DisksStartAsyncReplicationRequest', +'DisksStopGroupAsyncReplicationResource', +'DisplayDevice', +'DistributionPolicy', +'DistributionPolicyZoneConfiguration', +'Duration', +'EnableXpnHostProjectRequest', +'EnableXpnResourceProjectRequest', +'Error', +'ErrorDetails', +'ErrorInfo', +'Errors', +'ExchangedPeeringRoute', +'ExchangedPeeringRoutesList', +'ExpandIpCidrRangeSubnetworkRequest', +'Expr', +'ExternalVpnGateway', +'ExternalVpnGatewayInterface', +'ExternalVpnGatewayList', +'ExternalVpnGatewaysClient', +'FileContentBuffer', +'Firewall', +'FirewallList', +'FirewallLogConfig', +'FirewallPoliciesClient', +'FirewallPoliciesListAssociationsResponse', +'FirewallPolicy', +'FirewallPolicyAssociation', +'FirewallPolicyList', +'FirewallPolicyRule', +'FirewallPolicyRuleMatcher', +'FirewallPolicyRuleMatcherLayer4Config', +'FirewallPolicyRuleSecureTag', +'FirewallsClient', +'FixedOrPercent', +'ForwardingRule', +'ForwardingRuleAggregatedList', +'ForwardingRuleList', +'ForwardingRuleReference', +'ForwardingRuleServiceDirectoryRegistration', +'ForwardingRulesClient', +'ForwardingRulesScopedList', +'GRPCHealthCheck', +'GetAcceleratorTypeRequest', +'GetAddressRequest', +'GetAssociationFirewallPolicyRequest', +'GetAssociationNetworkFirewallPolicyRequest', +'GetAssociationRegionNetworkFirewallPolicyRequest', +'GetAutoscalerRequest', +'GetBackendBucketRequest', +'GetBackendServiceRequest', +'GetDiagnosticsInterconnectRequest', +'GetDiskRequest', +'GetDiskTypeRequest', +'GetEffectiveFirewallsInstanceRequest', +'GetEffectiveFirewallsNetworkRequest', +'GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest', +'GetExternalVpnGatewayRequest', +'GetFirewallPolicyRequest', +'GetFirewallRequest', 
+'GetForwardingRuleRequest', +'GetFromFamilyImageRequest', +'GetGlobalAddressRequest', +'GetGlobalForwardingRuleRequest', +'GetGlobalNetworkEndpointGroupRequest', +'GetGlobalOperationRequest', +'GetGlobalOrganizationOperationRequest', +'GetGlobalPublicDelegatedPrefixeRequest', +'GetGuestAttributesInstanceRequest', +'GetHealthBackendServiceRequest', +'GetHealthCheckRequest', +'GetHealthRegionBackendServiceRequest', +'GetHealthTargetPoolRequest', +'GetIamPolicyBackendServiceRequest', +'GetIamPolicyDiskRequest', +'GetIamPolicyFirewallPolicyRequest', +'GetIamPolicyImageRequest', +'GetIamPolicyInstanceRequest', +'GetIamPolicyInstanceTemplateRequest', +'GetIamPolicyLicenseRequest', +'GetIamPolicyMachineImageRequest', +'GetIamPolicyNetworkAttachmentRequest', +'GetIamPolicyNetworkFirewallPolicyRequest', +'GetIamPolicyNodeGroupRequest', +'GetIamPolicyNodeTemplateRequest', +'GetIamPolicyRegionBackendServiceRequest', +'GetIamPolicyRegionDiskRequest', +'GetIamPolicyRegionNetworkFirewallPolicyRequest', +'GetIamPolicyReservationRequest', +'GetIamPolicyResourcePolicyRequest', +'GetIamPolicyServiceAttachmentRequest', +'GetIamPolicySnapshotRequest', +'GetIamPolicySubnetworkRequest', +'GetImageFamilyViewRequest', +'GetImageRequest', +'GetInstanceGroupManagerRequest', +'GetInstanceGroupRequest', +'GetInstanceRequest', +'GetInstanceTemplateRequest', +'GetInterconnectAttachmentRequest', +'GetInterconnectLocationRequest', +'GetInterconnectRemoteLocationRequest', +'GetInterconnectRequest', +'GetLicenseCodeRequest', +'GetLicenseRequest', +'GetMachineImageRequest', +'GetMachineTypeRequest', +'GetNatMappingInfoRoutersRequest', +'GetNetworkAttachmentRequest', +'GetNetworkEdgeSecurityServiceRequest', +'GetNetworkEndpointGroupRequest', +'GetNetworkFirewallPolicyRequest', +'GetNetworkRequest', +'GetNodeGroupRequest', +'GetNodeTemplateRequest', +'GetNodeTypeRequest', +'GetPacketMirroringRequest', +'GetProjectRequest', +'GetPublicAdvertisedPrefixeRequest', +'GetPublicDelegatedPrefixeRequest', 
+'GetRegionAutoscalerRequest', +'GetRegionBackendServiceRequest', +'GetRegionCommitmentRequest', +'GetRegionDiskRequest', +'GetRegionDiskTypeRequest', +'GetRegionHealthCheckRequest', +'GetRegionHealthCheckServiceRequest', +'GetRegionInstanceGroupManagerRequest', +'GetRegionInstanceGroupRequest', +'GetRegionInstanceTemplateRequest', +'GetRegionNetworkEndpointGroupRequest', +'GetRegionNetworkFirewallPolicyRequest', +'GetRegionNotificationEndpointRequest', +'GetRegionOperationRequest', +'GetRegionRequest', +'GetRegionSecurityPolicyRequest', +'GetRegionSslCertificateRequest', +'GetRegionSslPolicyRequest', +'GetRegionTargetHttpProxyRequest', +'GetRegionTargetHttpsProxyRequest', +'GetRegionTargetTcpProxyRequest', +'GetRegionUrlMapRequest', +'GetReservationRequest', +'GetResourcePolicyRequest', +'GetRouteRequest', +'GetRouterRequest', +'GetRouterStatusRouterRequest', +'GetRuleFirewallPolicyRequest', +'GetRuleNetworkFirewallPolicyRequest', +'GetRuleRegionNetworkFirewallPolicyRequest', +'GetRuleSecurityPolicyRequest', +'GetScreenshotInstanceRequest', +'GetSecurityPolicyRequest', +'GetSerialPortOutputInstanceRequest', +'GetServiceAttachmentRequest', +'GetShieldedInstanceIdentityInstanceRequest', +'GetSnapshotRequest', +'GetSslCertificateRequest', +'GetSslPolicyRequest', +'GetStatusVpnGatewayRequest', +'GetSubnetworkRequest', +'GetTargetGrpcProxyRequest', +'GetTargetHttpProxyRequest', +'GetTargetHttpsProxyRequest', +'GetTargetInstanceRequest', +'GetTargetPoolRequest', +'GetTargetSslProxyRequest', +'GetTargetTcpProxyRequest', +'GetTargetVpnGatewayRequest', +'GetUrlMapRequest', +'GetVpnGatewayRequest', +'GetVpnTunnelRequest', +'GetXpnHostProjectRequest', +'GetXpnResourcesProjectsRequest', +'GetZoneOperationRequest', +'GetZoneRequest', +'GlobalAddressesClient', +'GlobalAddressesMoveRequest', +'GlobalForwardingRulesClient', +'GlobalNetworkEndpointGroupsAttachEndpointsRequest', +'GlobalNetworkEndpointGroupsClient', +'GlobalNetworkEndpointGroupsDetachEndpointsRequest', 
+'GlobalOperationsClient', +'GlobalOrganizationOperationsClient', +'GlobalOrganizationSetPolicyRequest', +'GlobalPublicDelegatedPrefixesClient', +'GlobalSetLabelsRequest', +'GlobalSetPolicyRequest', +'GuestAttributes', +'GuestAttributesEntry', +'GuestAttributesValue', +'GuestOsFeature', +'HTTP2HealthCheck', +'HTTPHealthCheck', +'HTTPSHealthCheck', +'HealthCheck', +'HealthCheckList', +'HealthCheckLogConfig', +'HealthCheckReference', +'HealthCheckService', +'HealthCheckServiceReference', +'HealthCheckServicesList', +'HealthChecksAggregatedList', +'HealthChecksClient', +'HealthChecksScopedList', +'HealthStatus', +'HealthStatusForNetworkEndpoint', +'Help', +'HelpLink', +'HostRule', +'HttpFaultAbort', +'HttpFaultDelay', +'HttpFaultInjection', +'HttpHeaderAction', +'HttpHeaderMatch', +'HttpHeaderOption', +'HttpQueryParameterMatch', +'HttpRedirectAction', +'HttpRetryPolicy', +'HttpRouteAction', +'HttpRouteRule', +'HttpRouteRuleMatch', +'Image', +'ImageFamilyView', +'ImageFamilyViewsClient', +'ImageList', +'ImagesClient', +'InitialStateConfig', +'InsertAddressRequest', +'InsertAutoscalerRequest', +'InsertBackendBucketRequest', +'InsertBackendServiceRequest', +'InsertDiskRequest', +'InsertExternalVpnGatewayRequest', +'InsertFirewallPolicyRequest', +'InsertFirewallRequest', +'InsertForwardingRuleRequest', +'InsertGlobalAddressRequest', +'InsertGlobalForwardingRuleRequest', +'InsertGlobalNetworkEndpointGroupRequest', +'InsertGlobalPublicDelegatedPrefixeRequest', +'InsertHealthCheckRequest', +'InsertImageRequest', +'InsertInstanceGroupManagerRequest', +'InsertInstanceGroupRequest', +'InsertInstanceRequest', +'InsertInstanceTemplateRequest', +'InsertInterconnectAttachmentRequest', +'InsertInterconnectRequest', +'InsertLicenseRequest', +'InsertMachineImageRequest', +'InsertNetworkAttachmentRequest', +'InsertNetworkEdgeSecurityServiceRequest', +'InsertNetworkEndpointGroupRequest', +'InsertNetworkFirewallPolicyRequest', +'InsertNetworkRequest', +'InsertNodeGroupRequest', 
+'InsertNodeTemplateRequest', +'InsertPacketMirroringRequest', +'InsertPublicAdvertisedPrefixeRequest', +'InsertPublicDelegatedPrefixeRequest', +'InsertRegionAutoscalerRequest', +'InsertRegionBackendServiceRequest', +'InsertRegionCommitmentRequest', +'InsertRegionDiskRequest', +'InsertRegionHealthCheckRequest', +'InsertRegionHealthCheckServiceRequest', +'InsertRegionInstanceGroupManagerRequest', +'InsertRegionInstanceTemplateRequest', +'InsertRegionNetworkEndpointGroupRequest', +'InsertRegionNetworkFirewallPolicyRequest', +'InsertRegionNotificationEndpointRequest', +'InsertRegionSecurityPolicyRequest', +'InsertRegionSslCertificateRequest', +'InsertRegionSslPolicyRequest', +'InsertRegionTargetHttpProxyRequest', +'InsertRegionTargetHttpsProxyRequest', +'InsertRegionTargetTcpProxyRequest', +'InsertRegionUrlMapRequest', +'InsertReservationRequest', +'InsertResourcePolicyRequest', +'InsertRouteRequest', +'InsertRouterRequest', +'InsertSecurityPolicyRequest', +'InsertServiceAttachmentRequest', +'InsertSnapshotRequest', +'InsertSslCertificateRequest', +'InsertSslPolicyRequest', +'InsertSubnetworkRequest', +'InsertTargetGrpcProxyRequest', +'InsertTargetHttpProxyRequest', +'InsertTargetHttpsProxyRequest', +'InsertTargetInstanceRequest', +'InsertTargetPoolRequest', +'InsertTargetSslProxyRequest', +'InsertTargetTcpProxyRequest', +'InsertTargetVpnGatewayRequest', +'InsertUrlMapRequest', +'InsertVpnGatewayRequest', +'InsertVpnTunnelRequest', +'Instance', +'InstanceAggregatedList', +'InstanceConsumptionData', +'InstanceConsumptionInfo', +'InstanceGroup', +'InstanceGroupAggregatedList', +'InstanceGroupList', +'InstanceGroupManager', +'InstanceGroupManagerActionsSummary', +'InstanceGroupManagerAggregatedList', +'InstanceGroupManagerAutoHealingPolicy', +'InstanceGroupManagerInstanceLifecyclePolicy', +'InstanceGroupManagerList', +'InstanceGroupManagerStatus', +'InstanceGroupManagerStatusStateful', +'InstanceGroupManagerStatusStatefulPerInstanceConfigs', 
+'InstanceGroupManagerStatusVersionTarget', +'InstanceGroupManagerUpdatePolicy', +'InstanceGroupManagerVersion', +'InstanceGroupManagersAbandonInstancesRequest', +'InstanceGroupManagersApplyUpdatesRequest', +'InstanceGroupManagersClient', +'InstanceGroupManagersCreateInstancesRequest', +'InstanceGroupManagersDeleteInstancesRequest', +'InstanceGroupManagersDeletePerInstanceConfigsReq', +'InstanceGroupManagersListErrorsResponse', +'InstanceGroupManagersListManagedInstancesResponse', +'InstanceGroupManagersListPerInstanceConfigsResp', +'InstanceGroupManagersPatchPerInstanceConfigsReq', +'InstanceGroupManagersRecreateInstancesRequest', +'InstanceGroupManagersScopedList', +'InstanceGroupManagersSetInstanceTemplateRequest', +'InstanceGroupManagersSetTargetPoolsRequest', +'InstanceGroupManagersUpdatePerInstanceConfigsReq', +'InstanceGroupsAddInstancesRequest', +'InstanceGroupsClient', +'InstanceGroupsListInstances', +'InstanceGroupsListInstancesRequest', +'InstanceGroupsRemoveInstancesRequest', +'InstanceGroupsScopedList', +'InstanceGroupsSetNamedPortsRequest', +'InstanceList', +'InstanceListReferrers', +'InstanceManagedByIgmError', +'InstanceManagedByIgmErrorInstanceActionDetails', +'InstanceManagedByIgmErrorManagedInstanceError', +'InstanceMoveRequest', +'InstanceParams', +'InstanceProperties', +'InstanceReference', +'InstanceTemplate', +'InstanceTemplateAggregatedList', +'InstanceTemplateList', +'InstanceTemplatesClient', +'InstanceTemplatesScopedList', +'InstanceWithNamedPorts', +'InstancesAddResourcePoliciesRequest', +'InstancesClient', +'InstancesGetEffectiveFirewallsResponse', +'InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', +'InstancesRemoveResourcePoliciesRequest', +'InstancesScopedList', +'InstancesSetLabelsRequest', +'InstancesSetMachineResourcesRequest', +'InstancesSetMachineTypeRequest', +'InstancesSetMinCpuPlatformRequest', +'InstancesSetNameRequest', +'InstancesSetServiceAccountRequest', +'InstancesStartWithEncryptionKeyRequest', 
+'Int64RangeMatch', +'Interconnect', +'InterconnectAttachment', +'InterconnectAttachmentAggregatedList', +'InterconnectAttachmentConfigurationConstraints', +'InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange', +'InterconnectAttachmentList', +'InterconnectAttachmentPartnerMetadata', +'InterconnectAttachmentPrivateInfo', +'InterconnectAttachmentsClient', +'InterconnectAttachmentsScopedList', +'InterconnectCircuitInfo', +'InterconnectDiagnostics', +'InterconnectDiagnosticsARPEntry', +'InterconnectDiagnosticsLinkLACPStatus', +'InterconnectDiagnosticsLinkOpticalPower', +'InterconnectDiagnosticsLinkStatus', +'InterconnectList', +'InterconnectLocation', +'InterconnectLocationList', +'InterconnectLocationRegionInfo', +'InterconnectLocationsClient', +'InterconnectOutageNotification', +'InterconnectRemoteLocation', +'InterconnectRemoteLocationConstraints', +'InterconnectRemoteLocationConstraintsSubnetLengthRange', +'InterconnectRemoteLocationList', +'InterconnectRemoteLocationPermittedConnections', +'InterconnectRemoteLocationsClient', +'InterconnectsClient', +'InterconnectsGetDiagnosticsResponse', +'InvalidateCacheUrlMapRequest', +'Items', +'License', +'LicenseCode', +'LicenseCodeLicenseAlias', +'LicenseCodesClient', +'LicenseResourceCommitment', +'LicenseResourceRequirements', +'LicensesClient', +'LicensesListResponse', +'ListAcceleratorTypesRequest', +'ListAddressesRequest', +'ListAssociationsFirewallPolicyRequest', +'ListAutoscalersRequest', +'ListAvailableFeaturesRegionSslPoliciesRequest', +'ListAvailableFeaturesSslPoliciesRequest', +'ListBackendBucketsRequest', +'ListBackendServicesRequest', +'ListDiskTypesRequest', +'ListDisksRequest', +'ListErrorsInstanceGroupManagersRequest', +'ListErrorsRegionInstanceGroupManagersRequest', +'ListExternalVpnGatewaysRequest', +'ListFirewallPoliciesRequest', +'ListFirewallsRequest', +'ListForwardingRulesRequest', +'ListGlobalAddressesRequest', +'ListGlobalForwardingRulesRequest', +'ListGlobalNetworkEndpointGroupsRequest', 
+'ListGlobalOperationsRequest', +'ListGlobalOrganizationOperationsRequest', +'ListGlobalPublicDelegatedPrefixesRequest', +'ListHealthChecksRequest', +'ListImagesRequest', +'ListInstanceGroupManagersRequest', +'ListInstanceGroupsRequest', +'ListInstanceTemplatesRequest', +'ListInstancesInstanceGroupsRequest', +'ListInstancesRegionInstanceGroupsRequest', +'ListInstancesRequest', +'ListInterconnectAttachmentsRequest', +'ListInterconnectLocationsRequest', +'ListInterconnectRemoteLocationsRequest', +'ListInterconnectsRequest', +'ListLicensesRequest', +'ListMachineImagesRequest', +'ListMachineTypesRequest', +'ListManagedInstancesInstanceGroupManagersRequest', +'ListManagedInstancesRegionInstanceGroupManagersRequest', +'ListNetworkAttachmentsRequest', +'ListNetworkEndpointGroupsRequest', +'ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest', +'ListNetworkEndpointsNetworkEndpointGroupsRequest', +'ListNetworkFirewallPoliciesRequest', +'ListNetworksRequest', +'ListNodeGroupsRequest', +'ListNodeTemplatesRequest', +'ListNodeTypesRequest', +'ListNodesNodeGroupsRequest', +'ListPacketMirroringsRequest', +'ListPeeringRoutesNetworksRequest', +'ListPerInstanceConfigsInstanceGroupManagersRequest', +'ListPerInstanceConfigsRegionInstanceGroupManagersRequest', +'ListPreconfiguredExpressionSetsSecurityPoliciesRequest', +'ListPublicAdvertisedPrefixesRequest', +'ListPublicDelegatedPrefixesRequest', +'ListReferrersInstancesRequest', +'ListRegionAutoscalersRequest', +'ListRegionBackendServicesRequest', +'ListRegionCommitmentsRequest', +'ListRegionDiskTypesRequest', +'ListRegionDisksRequest', +'ListRegionHealthCheckServicesRequest', +'ListRegionHealthChecksRequest', +'ListRegionInstanceGroupManagersRequest', +'ListRegionInstanceGroupsRequest', +'ListRegionInstanceTemplatesRequest', +'ListRegionNetworkEndpointGroupsRequest', +'ListRegionNetworkFirewallPoliciesRequest', +'ListRegionNotificationEndpointsRequest', +'ListRegionOperationsRequest', +'ListRegionSecurityPoliciesRequest', 
+'ListRegionSslCertificatesRequest', +'ListRegionSslPoliciesRequest', +'ListRegionTargetHttpProxiesRequest', +'ListRegionTargetHttpsProxiesRequest', +'ListRegionTargetTcpProxiesRequest', +'ListRegionUrlMapsRequest', +'ListRegionsRequest', +'ListReservationsRequest', +'ListResourcePoliciesRequest', +'ListRoutersRequest', +'ListRoutesRequest', +'ListSecurityPoliciesRequest', +'ListServiceAttachmentsRequest', +'ListSnapshotsRequest', +'ListSslCertificatesRequest', +'ListSslPoliciesRequest', +'ListSubnetworksRequest', +'ListTargetGrpcProxiesRequest', +'ListTargetHttpProxiesRequest', +'ListTargetHttpsProxiesRequest', +'ListTargetInstancesRequest', +'ListTargetPoolsRequest', +'ListTargetSslProxiesRequest', +'ListTargetTcpProxiesRequest', +'ListTargetVpnGatewaysRequest', +'ListUrlMapsRequest', +'ListUsableSubnetworksRequest', +'ListVpnGatewaysRequest', +'ListVpnTunnelsRequest', +'ListXpnHostsProjectsRequest', +'ListZoneOperationsRequest', +'ListZonesRequest', +'LocalDisk', +'LocalizedMessage', +'LocationPolicy', +'LocationPolicyLocation', +'LocationPolicyLocationConstraints', +'LogConfig', +'LogConfigCloudAuditOptions', +'LogConfigCounterOptions', +'LogConfigCounterOptionsCustomField', +'LogConfigDataAccessOptions', +'MachineImage', +'MachineImageList', +'MachineImagesClient', +'MachineType', +'MachineTypeAggregatedList', +'MachineTypeList', +'MachineTypesClient', +'MachineTypesScopedList', +'ManagedInstance', +'ManagedInstanceInstanceHealth', +'ManagedInstanceLastAttempt', +'ManagedInstanceVersion', +'Metadata', +'MetadataFilter', +'MetadataFilterLabelMatch', +'MoveAddressRequest', +'MoveDiskProjectRequest', +'MoveFirewallPolicyRequest', +'MoveGlobalAddressRequest', +'MoveInstanceProjectRequest', +'NamedPort', +'Network', +'NetworkAttachment', +'NetworkAttachmentAggregatedList', +'NetworkAttachmentConnectedEndpoint', +'NetworkAttachmentList', +'NetworkAttachmentsClient', +'NetworkAttachmentsScopedList', +'NetworkEdgeSecurityService', 
+'NetworkEdgeSecurityServiceAggregatedList', +'NetworkEdgeSecurityServicesClient', +'NetworkEdgeSecurityServicesScopedList', +'NetworkEndpoint', +'NetworkEndpointGroup', +'NetworkEndpointGroupAggregatedList', +'NetworkEndpointGroupAppEngine', +'NetworkEndpointGroupCloudFunction', +'NetworkEndpointGroupCloudRun', +'NetworkEndpointGroupList', +'NetworkEndpointGroupPscData', +'NetworkEndpointGroupsAttachEndpointsRequest', +'NetworkEndpointGroupsClient', +'NetworkEndpointGroupsDetachEndpointsRequest', +'NetworkEndpointGroupsListEndpointsRequest', +'NetworkEndpointGroupsListNetworkEndpoints', +'NetworkEndpointGroupsScopedList', +'NetworkEndpointWithHealthStatus', +'NetworkFirewallPoliciesClient', +'NetworkInterface', +'NetworkList', +'NetworkPeering', +'NetworkPerformanceConfig', +'NetworkRoutingConfig', +'NetworksAddPeeringRequest', +'NetworksClient', +'NetworksGetEffectiveFirewallsResponse', +'NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy', +'NetworksRemovePeeringRequest', +'NetworksUpdatePeeringRequest', +'NodeGroup', +'NodeGroupAggregatedList', +'NodeGroupAutoscalingPolicy', +'NodeGroupList', +'NodeGroupMaintenanceWindow', +'NodeGroupNode', +'NodeGroupsAddNodesRequest', +'NodeGroupsClient', +'NodeGroupsDeleteNodesRequest', +'NodeGroupsListNodes', +'NodeGroupsScopedList', +'NodeGroupsSetNodeTemplateRequest', +'NodeGroupsSimulateMaintenanceEventRequest', +'NodeTemplate', +'NodeTemplateAggregatedList', +'NodeTemplateList', +'NodeTemplateNodeTypeFlexibility', +'NodeTemplatesClient', +'NodeTemplatesScopedList', +'NodeType', +'NodeTypeAggregatedList', +'NodeTypeList', +'NodeTypesClient', +'NodeTypesScopedList', +'NotificationEndpoint', +'NotificationEndpointGrpcSettings', +'NotificationEndpointList', +'Operation', +'OperationAggregatedList', +'OperationList', +'OperationsScopedList', +'OutlierDetection', +'PacketIntervals', +'PacketMirroring', +'PacketMirroringAggregatedList', +'PacketMirroringFilter', +'PacketMirroringForwardingRuleInfo', 
+'PacketMirroringList', +'PacketMirroringMirroredResourceInfo', +'PacketMirroringMirroredResourceInfoInstanceInfo', +'PacketMirroringMirroredResourceInfoSubnetInfo', +'PacketMirroringNetworkInfo', +'PacketMirroringsClient', +'PacketMirroringsScopedList', +'PatchAutoscalerRequest', +'PatchBackendBucketRequest', +'PatchBackendServiceRequest', +'PatchFirewallPolicyRequest', +'PatchFirewallRequest', +'PatchForwardingRuleRequest', +'PatchGlobalForwardingRuleRequest', +'PatchGlobalPublicDelegatedPrefixeRequest', +'PatchHealthCheckRequest', +'PatchImageRequest', +'PatchInstanceGroupManagerRequest', +'PatchInterconnectAttachmentRequest', +'PatchInterconnectRequest', +'PatchNetworkEdgeSecurityServiceRequest', +'PatchNetworkFirewallPolicyRequest', +'PatchNetworkRequest', +'PatchNodeGroupRequest', +'PatchPacketMirroringRequest', +'PatchPerInstanceConfigsInstanceGroupManagerRequest', +'PatchPerInstanceConfigsRegionInstanceGroupManagerRequest', +'PatchPublicAdvertisedPrefixeRequest', +'PatchPublicDelegatedPrefixeRequest', +'PatchRegionAutoscalerRequest', +'PatchRegionBackendServiceRequest', +'PatchRegionHealthCheckRequest', +'PatchRegionHealthCheckServiceRequest', +'PatchRegionInstanceGroupManagerRequest', +'PatchRegionNetworkFirewallPolicyRequest', +'PatchRegionSecurityPolicyRequest', +'PatchRegionSslPolicyRequest', +'PatchRegionTargetHttpsProxyRequest', +'PatchRegionUrlMapRequest', +'PatchResourcePolicyRequest', +'PatchRouterRequest', +'PatchRuleFirewallPolicyRequest', +'PatchRuleNetworkFirewallPolicyRequest', +'PatchRuleRegionNetworkFirewallPolicyRequest', +'PatchRuleSecurityPolicyRequest', +'PatchSecurityPolicyRequest', +'PatchServiceAttachmentRequest', +'PatchSslPolicyRequest', +'PatchSubnetworkRequest', +'PatchTargetGrpcProxyRequest', +'PatchTargetHttpProxyRequest', +'PatchTargetHttpsProxyRequest', +'PatchUrlMapRequest', +'PathMatcher', +'PathRule', +'PerInstanceConfig', +'Policy', +'PreconfiguredWafSet', +'PreservedState', +'PreservedStatePreservedDisk', 
+'PreviewRouterRequest', +'Project', +'ProjectsClient', +'ProjectsDisableXpnResourceRequest', +'ProjectsEnableXpnResourceRequest', +'ProjectsGetXpnResources', +'ProjectsListXpnHostsRequest', +'ProjectsSetDefaultNetworkTierRequest', +'PublicAdvertisedPrefix', +'PublicAdvertisedPrefixList', +'PublicAdvertisedPrefixPublicDelegatedPrefix', +'PublicAdvertisedPrefixesClient', +'PublicDelegatedPrefix', +'PublicDelegatedPrefixAggregatedList', +'PublicDelegatedPrefixList', +'PublicDelegatedPrefixPublicDelegatedSubPrefix', +'PublicDelegatedPrefixesClient', +'PublicDelegatedPrefixesScopedList', +'Quota', +'QuotaExceededInfo', +'RawDisk', +'RecreateInstancesInstanceGroupManagerRequest', +'RecreateInstancesRegionInstanceGroupManagerRequest', +'Reference', +'Region', +'RegionAddressesMoveRequest', +'RegionAutoscalerList', +'RegionAutoscalersClient', +'RegionBackendServicesClient', +'RegionCommitmentsClient', +'RegionDiskTypeList', +'RegionDiskTypesClient', +'RegionDisksAddResourcePoliciesRequest', +'RegionDisksClient', +'RegionDisksRemoveResourcePoliciesRequest', +'RegionDisksResizeRequest', +'RegionDisksStartAsyncReplicationRequest', +'RegionHealthCheckServicesClient', +'RegionHealthChecksClient', +'RegionInstanceGroupList', +'RegionInstanceGroupManagerDeleteInstanceConfigReq', +'RegionInstanceGroupManagerList', +'RegionInstanceGroupManagerPatchInstanceConfigReq', +'RegionInstanceGroupManagerUpdateInstanceConfigReq', +'RegionInstanceGroupManagersAbandonInstancesRequest', +'RegionInstanceGroupManagersApplyUpdatesRequest', +'RegionInstanceGroupManagersClient', +'RegionInstanceGroupManagersCreateInstancesRequest', +'RegionInstanceGroupManagersDeleteInstancesRequest', +'RegionInstanceGroupManagersListErrorsResponse', +'RegionInstanceGroupManagersListInstanceConfigsResp', +'RegionInstanceGroupManagersListInstancesResponse', +'RegionInstanceGroupManagersRecreateRequest', +'RegionInstanceGroupManagersSetTargetPoolsRequest', +'RegionInstanceGroupManagersSetTemplateRequest', 
+'RegionInstanceGroupsClient', +'RegionInstanceGroupsListInstances', +'RegionInstanceGroupsListInstancesRequest', +'RegionInstanceGroupsSetNamedPortsRequest', +'RegionInstanceTemplatesClient', +'RegionInstancesClient', +'RegionList', +'RegionNetworkEndpointGroupsClient', +'RegionNetworkFirewallPoliciesClient', +'RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse', +'RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', +'RegionNotificationEndpointsClient', +'RegionOperationsClient', +'RegionSecurityPoliciesClient', +'RegionSetLabelsRequest', +'RegionSetPolicyRequest', +'RegionSslCertificatesClient', +'RegionSslPoliciesClient', +'RegionTargetHttpProxiesClient', +'RegionTargetHttpsProxiesClient', +'RegionTargetHttpsProxiesSetSslCertificatesRequest', +'RegionTargetTcpProxiesClient', +'RegionUrlMapsClient', +'RegionUrlMapsValidateRequest', +'RegionsClient', +'RemoveAssociationFirewallPolicyRequest', +'RemoveAssociationNetworkFirewallPolicyRequest', +'RemoveAssociationRegionNetworkFirewallPolicyRequest', +'RemoveHealthCheckTargetPoolRequest', +'RemoveInstanceTargetPoolRequest', +'RemoveInstancesInstanceGroupRequest', +'RemovePeeringNetworkRequest', +'RemoveResourcePoliciesDiskRequest', +'RemoveResourcePoliciesInstanceRequest', +'RemoveResourcePoliciesRegionDiskRequest', +'RemoveRuleFirewallPolicyRequest', +'RemoveRuleNetworkFirewallPolicyRequest', +'RemoveRuleRegionNetworkFirewallPolicyRequest', +'RemoveRuleSecurityPolicyRequest', +'RequestMirrorPolicy', +'Reservation', +'ReservationAffinity', +'ReservationAggregatedList', +'ReservationList', +'ReservationsClient', +'ReservationsResizeRequest', +'ReservationsScopedList', +'ResetInstanceRequest', +'ResizeDiskRequest', +'ResizeInstanceGroupManagerRequest', +'ResizeRegionDiskRequest', +'ResizeRegionInstanceGroupManagerRequest', +'ResizeReservationRequest', +'ResourceCommitment', +'ResourceGroupReference', +'ResourcePoliciesClient', +'ResourcePoliciesScopedList', +'ResourcePolicy', 
+'ResourcePolicyAggregatedList', +'ResourcePolicyDailyCycle', +'ResourcePolicyDiskConsistencyGroupPolicy', +'ResourcePolicyGroupPlacementPolicy', +'ResourcePolicyHourlyCycle', +'ResourcePolicyInstanceSchedulePolicy', +'ResourcePolicyInstanceSchedulePolicySchedule', +'ResourcePolicyList', +'ResourcePolicyResourceStatus', +'ResourcePolicyResourceStatusInstanceSchedulePolicyStatus', +'ResourcePolicySnapshotSchedulePolicy', +'ResourcePolicySnapshotSchedulePolicyRetentionPolicy', +'ResourcePolicySnapshotSchedulePolicySchedule', +'ResourcePolicySnapshotSchedulePolicySnapshotProperties', +'ResourcePolicyWeeklyCycle', +'ResourcePolicyWeeklyCycleDayOfWeek', +'ResourceStatus', +'ResumeInstanceRequest', +'Route', +'RouteAsPath', +'RouteList', +'Router', +'RouterAdvertisedIpRange', +'RouterAggregatedList', +'RouterBgp', +'RouterBgpPeer', +'RouterBgpPeerBfd', +'RouterBgpPeerCustomLearnedIpRange', +'RouterInterface', +'RouterList', +'RouterMd5AuthenticationKey', +'RouterNat', +'RouterNatLogConfig', +'RouterNatRule', +'RouterNatRuleAction', +'RouterNatSubnetworkToNat', +'RouterStatus', +'RouterStatusBgpPeerStatus', +'RouterStatusNatStatus', +'RouterStatusNatStatusNatRuleStatus', +'RouterStatusResponse', +'RoutersClient', +'RoutersPreviewResponse', +'RoutersScopedList', +'RoutesClient', +'Rule', +'SSLHealthCheck', +'SavedAttachedDisk', +'SavedDisk', +'ScalingScheduleStatus', +'Scheduling', +'SchedulingNodeAffinity', +'ScratchDisks', +'Screenshot', +'SecurityPoliciesAggregatedList', +'SecurityPoliciesClient', +'SecurityPoliciesListPreconfiguredExpressionSetsResponse', +'SecurityPoliciesScopedList', +'SecurityPoliciesWafConfig', +'SecurityPolicy', +'SecurityPolicyAdaptiveProtectionConfig', +'SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig', +'SecurityPolicyAdvancedOptionsConfig', +'SecurityPolicyAdvancedOptionsConfigJsonCustomConfig', +'SecurityPolicyDdosProtectionConfig', +'SecurityPolicyList', +'SecurityPolicyRecaptchaOptionsConfig', +'SecurityPolicyReference', 
+'SecurityPolicyRule', +'SecurityPolicyRuleHttpHeaderAction', +'SecurityPolicyRuleHttpHeaderActionHttpHeaderOption', +'SecurityPolicyRuleMatcher', +'SecurityPolicyRuleMatcherConfig', +'SecurityPolicyRulePreconfiguredWafConfig', +'SecurityPolicyRulePreconfiguredWafConfigExclusion', +'SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams', +'SecurityPolicyRuleRateLimitOptions', +'SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig', +'SecurityPolicyRuleRateLimitOptionsThreshold', +'SecurityPolicyRuleRedirectOptions', +'SecuritySettings', +'SendDiagnosticInterruptInstanceRequest', +'SendDiagnosticInterruptInstanceResponse', +'SerialPortOutput', +'ServerBinding', +'ServiceAccount', +'ServiceAttachment', +'ServiceAttachmentAggregatedList', +'ServiceAttachmentConnectedEndpoint', +'ServiceAttachmentConsumerProjectLimit', +'ServiceAttachmentList', +'ServiceAttachmentsClient', +'ServiceAttachmentsScopedList', +'SetBackendServiceTargetSslProxyRequest', +'SetBackendServiceTargetTcpProxyRequest', +'SetBackupTargetPoolRequest', +'SetCertificateMapTargetHttpsProxyRequest', +'SetCertificateMapTargetSslProxyRequest', +'SetCommonInstanceMetadataProjectRequest', +'SetDefaultNetworkTierProjectRequest', +'SetDeletionProtectionInstanceRequest', +'SetDiskAutoDeleteInstanceRequest', +'SetEdgeSecurityPolicyBackendBucketRequest', +'SetEdgeSecurityPolicyBackendServiceRequest', +'SetIamPolicyBackendServiceRequest', +'SetIamPolicyDiskRequest', +'SetIamPolicyFirewallPolicyRequest', +'SetIamPolicyImageRequest', +'SetIamPolicyInstanceRequest', +'SetIamPolicyInstanceTemplateRequest', +'SetIamPolicyLicenseRequest', +'SetIamPolicyMachineImageRequest', +'SetIamPolicyNetworkAttachmentRequest', +'SetIamPolicyNetworkFirewallPolicyRequest', +'SetIamPolicyNodeGroupRequest', +'SetIamPolicyNodeTemplateRequest', +'SetIamPolicyRegionBackendServiceRequest', +'SetIamPolicyRegionDiskRequest', +'SetIamPolicyRegionNetworkFirewallPolicyRequest', +'SetIamPolicyReservationRequest', 
+'SetIamPolicyResourcePolicyRequest', +'SetIamPolicyServiceAttachmentRequest', +'SetIamPolicySnapshotRequest', +'SetIamPolicySubnetworkRequest', +'SetInstanceTemplateInstanceGroupManagerRequest', +'SetInstanceTemplateRegionInstanceGroupManagerRequest', +'SetLabelsAddressRequest', +'SetLabelsDiskRequest', +'SetLabelsExternalVpnGatewayRequest', +'SetLabelsForwardingRuleRequest', +'SetLabelsGlobalAddressRequest', +'SetLabelsGlobalForwardingRuleRequest', +'SetLabelsImageRequest', +'SetLabelsInstanceRequest', +'SetLabelsInterconnectAttachmentRequest', +'SetLabelsInterconnectRequest', +'SetLabelsRegionDiskRequest', +'SetLabelsSecurityPolicyRequest', +'SetLabelsSnapshotRequest', +'SetLabelsTargetVpnGatewayRequest', +'SetLabelsVpnGatewayRequest', +'SetLabelsVpnTunnelRequest', +'SetMachineResourcesInstanceRequest', +'SetMachineTypeInstanceRequest', +'SetMetadataInstanceRequest', +'SetMinCpuPlatformInstanceRequest', +'SetNameInstanceRequest', +'SetNamedPortsInstanceGroupRequest', +'SetNamedPortsRegionInstanceGroupRequest', +'SetNodeTemplateNodeGroupRequest', +'SetPrivateIpGoogleAccessSubnetworkRequest', +'SetProxyHeaderTargetSslProxyRequest', +'SetProxyHeaderTargetTcpProxyRequest', +'SetQuicOverrideTargetHttpsProxyRequest', +'SetSchedulingInstanceRequest', +'SetSecurityPolicyBackendServiceRequest', +'SetServiceAccountInstanceRequest', +'SetShieldedInstanceIntegrityPolicyInstanceRequest', +'SetSslCertificatesRegionTargetHttpsProxyRequest', +'SetSslCertificatesTargetHttpsProxyRequest', +'SetSslCertificatesTargetSslProxyRequest', +'SetSslPolicyTargetHttpsProxyRequest', +'SetSslPolicyTargetSslProxyRequest', +'SetTagsInstanceRequest', +'SetTargetForwardingRuleRequest', +'SetTargetGlobalForwardingRuleRequest', +'SetTargetPoolsInstanceGroupManagerRequest', +'SetTargetPoolsRegionInstanceGroupManagerRequest', +'SetUrlMapRegionTargetHttpProxyRequest', +'SetUrlMapRegionTargetHttpsProxyRequest', +'SetUrlMapTargetHttpProxyRequest', +'SetUrlMapTargetHttpsProxyRequest', 
+'SetUsageExportBucketProjectRequest', +'ShareSettings', +'ShareSettingsProjectConfig', +'ShieldedInstanceConfig', +'ShieldedInstanceIdentity', +'ShieldedInstanceIdentityEntry', +'ShieldedInstanceIntegrityPolicy', +'SignedUrlKey', +'SimulateMaintenanceEventInstanceRequest', +'SimulateMaintenanceEventNodeGroupRequest', +'Snapshot', +'SnapshotList', +'SnapshotsClient', +'SourceDiskEncryptionKey', +'SourceInstanceParams', +'SourceInstanceProperties', +'SslCertificate', +'SslCertificateAggregatedList', +'SslCertificateList', +'SslCertificateManagedSslCertificate', +'SslCertificateSelfManagedSslCertificate', +'SslCertificatesClient', +'SslCertificatesScopedList', +'SslPoliciesAggregatedList', +'SslPoliciesClient', +'SslPoliciesList', +'SslPoliciesListAvailableFeaturesResponse', +'SslPoliciesScopedList', +'SslPolicy', +'SslPolicyReference', +'StartAsyncReplicationDiskRequest', +'StartAsyncReplicationRegionDiskRequest', +'StartInstanceRequest', +'StartWithEncryptionKeyInstanceRequest', +'StatefulPolicy', +'StatefulPolicyPreservedState', +'StatefulPolicyPreservedStateDiskDevice', +'StopAsyncReplicationDiskRequest', +'StopAsyncReplicationRegionDiskRequest', +'StopGroupAsyncReplicationDiskRequest', +'StopGroupAsyncReplicationRegionDiskRequest', +'StopInstanceRequest', +'Subnetwork', +'SubnetworkAggregatedList', +'SubnetworkList', +'SubnetworkLogConfig', +'SubnetworkSecondaryRange', +'SubnetworksClient', +'SubnetworksExpandIpCidrRangeRequest', +'SubnetworksScopedList', +'SubnetworksSetPrivateIpGoogleAccessRequest', +'Subsetting', +'SuspendInstanceRequest', +'SwitchToCustomModeNetworkRequest', +'TCPHealthCheck', +'Tags', +'TargetGrpcProxiesClient', +'TargetGrpcProxy', +'TargetGrpcProxyList', +'TargetHttpProxiesClient', +'TargetHttpProxiesScopedList', +'TargetHttpProxy', +'TargetHttpProxyAggregatedList', +'TargetHttpProxyList', +'TargetHttpsProxiesClient', +'TargetHttpsProxiesScopedList', +'TargetHttpsProxiesSetCertificateMapRequest', 
+'TargetHttpsProxiesSetQuicOverrideRequest', +'TargetHttpsProxiesSetSslCertificatesRequest', +'TargetHttpsProxy', +'TargetHttpsProxyAggregatedList', +'TargetHttpsProxyList', +'TargetInstance', +'TargetInstanceAggregatedList', +'TargetInstanceList', +'TargetInstancesClient', +'TargetInstancesScopedList', +'TargetPool', +'TargetPoolAggregatedList', +'TargetPoolInstanceHealth', +'TargetPoolList', +'TargetPoolsAddHealthCheckRequest', +'TargetPoolsAddInstanceRequest', +'TargetPoolsClient', +'TargetPoolsRemoveHealthCheckRequest', +'TargetPoolsRemoveInstanceRequest', +'TargetPoolsScopedList', +'TargetReference', +'TargetSslProxiesClient', +'TargetSslProxiesSetBackendServiceRequest', +'TargetSslProxiesSetCertificateMapRequest', +'TargetSslProxiesSetProxyHeaderRequest', +'TargetSslProxiesSetSslCertificatesRequest', +'TargetSslProxy', +'TargetSslProxyList', +'TargetTcpProxiesClient', +'TargetTcpProxiesScopedList', +'TargetTcpProxiesSetBackendServiceRequest', +'TargetTcpProxiesSetProxyHeaderRequest', +'TargetTcpProxy', +'TargetTcpProxyAggregatedList', +'TargetTcpProxyList', +'TargetVpnGateway', +'TargetVpnGatewayAggregatedList', +'TargetVpnGatewayList', +'TargetVpnGatewaysClient', +'TargetVpnGatewaysScopedList', +'TestFailure', +'TestIamPermissionsDiskRequest', +'TestIamPermissionsExternalVpnGatewayRequest', +'TestIamPermissionsFirewallPolicyRequest', +'TestIamPermissionsImageRequest', +'TestIamPermissionsInstanceRequest', +'TestIamPermissionsInstanceTemplateRequest', +'TestIamPermissionsLicenseCodeRequest', +'TestIamPermissionsLicenseRequest', +'TestIamPermissionsMachineImageRequest', +'TestIamPermissionsNetworkAttachmentRequest', +'TestIamPermissionsNetworkEndpointGroupRequest', +'TestIamPermissionsNetworkFirewallPolicyRequest', +'TestIamPermissionsNodeGroupRequest', +'TestIamPermissionsNodeTemplateRequest', +'TestIamPermissionsPacketMirroringRequest', +'TestIamPermissionsRegionDiskRequest', +'TestIamPermissionsRegionNetworkFirewallPolicyRequest', 
+'TestIamPermissionsReservationRequest', +'TestIamPermissionsResourcePolicyRequest', +'TestIamPermissionsServiceAttachmentRequest', +'TestIamPermissionsSnapshotRequest', +'TestIamPermissionsSubnetworkRequest', +'TestIamPermissionsVpnGatewayRequest', +'TestPermissionsRequest', +'TestPermissionsResponse', +'Uint128', +'UpdateAccessConfigInstanceRequest', +'UpdateAutoscalerRequest', +'UpdateBackendBucketRequest', +'UpdateBackendServiceRequest', +'UpdateDiskRequest', +'UpdateDisplayDeviceInstanceRequest', +'UpdateFirewallRequest', +'UpdateHealthCheckRequest', +'UpdateInstanceRequest', +'UpdateNetworkInterfaceInstanceRequest', +'UpdatePeeringNetworkRequest', +'UpdatePerInstanceConfigsInstanceGroupManagerRequest', +'UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest', +'UpdateRegionAutoscalerRequest', +'UpdateRegionBackendServiceRequest', +'UpdateRegionCommitmentRequest', +'UpdateRegionDiskRequest', +'UpdateRegionHealthCheckRequest', +'UpdateRegionUrlMapRequest', +'UpdateReservationRequest', +'UpdateRouterRequest', +'UpdateShieldedInstanceConfigInstanceRequest', +'UpdateUrlMapRequest', +'UrlMap', +'UrlMapList', +'UrlMapReference', +'UrlMapTest', +'UrlMapTestHeader', +'UrlMapValidationResult', +'UrlMapsAggregatedList', +'UrlMapsClient', +'UrlMapsScopedList', +'UrlMapsValidateRequest', +'UrlMapsValidateResponse', +'UrlRewrite', +'UsableSubnetwork', +'UsableSubnetworkSecondaryRange', +'UsableSubnetworksAggregatedList', +'UsageExportLocation', +'ValidateRegionUrlMapRequest', +'ValidateUrlMapRequest', +'VmEndpointNatMappings', +'VmEndpointNatMappingsInterfaceNatMappings', +'VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings', +'VmEndpointNatMappingsList', +'VpnGateway', +'VpnGatewayAggregatedList', +'VpnGatewayList', +'VpnGatewayStatus', +'VpnGatewayStatusHighAvailabilityRequirementState', +'VpnGatewayStatusTunnel', +'VpnGatewayStatusVpnConnection', +'VpnGatewayVpnGatewayInterface', +'VpnGatewaysClient', +'VpnGatewaysGetStatusResponse', +'VpnGatewaysScopedList', 
+'VpnTunnel', +'VpnTunnelAggregatedList', +'VpnTunnelList', +'VpnTunnelsClient', +'VpnTunnelsScopedList', +'WafExpressionSet', +'WafExpressionSetExpression', +'WaitGlobalOperationRequest', +'WaitRegionOperationRequest', +'WaitZoneOperationRequest', +'Warning', +'Warnings', +'WeightedBackendService', +'XpnHostList', +'XpnResourceId', +'Zone', +'ZoneList', +'ZoneOperationsClient', +'ZoneSetLabelsRequest', +'ZoneSetPolicyRequest', +'ZonesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/compute_v1/gapic_metadata.json new file mode 100644 index 000000000..0f722bc68 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/gapic_metadata.json @@ -0,0 +1,4314 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.compute_v1", + "protoPackage": "google.cloud.compute.v1", + "schema": "1.0", + "services": { + "AcceleratorTypes": { + "clients": { + "rest": { + "libraryClient": "AcceleratorTypesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "Addresses": { + "clients": { + "rest": { + "libraryClient": "AddressesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Move": { + "methods": [ + "move" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + } + } + } + } + }, + "Autoscalers": { + "clients": { + "rest": { + "libraryClient": "AutoscalersClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + 
"Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "BackendBuckets": { + "clients": { + "rest": { + "libraryClient": "BackendBucketsClient", + "rpcs": { + "AddSignedUrlKey": { + "methods": [ + "add_signed_url_key" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "DeleteSignedUrlKey": { + "methods": [ + "delete_signed_url_key" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetEdgeSecurityPolicy": { + "methods": [ + "set_edge_security_policy" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "BackendServices": { + "clients": { + "rest": { + "libraryClient": "BackendServicesClient", + "rpcs": { + "AddSignedUrlKey": { + "methods": [ + "add_signed_url_key" + ] + }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "DeleteSignedUrlKey": { + "methods": [ + "delete_signed_url_key" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetHealth": { + "methods": [ + "get_health" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetEdgeSecurityPolicy": { + "methods": [ + "set_edge_security_policy" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetSecurityPolicy": { + "methods": [ + "set_security_policy" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "DiskTypes": { + "clients": { + "rest": { + "libraryClient": "DiskTypesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "List": 
{ + "methods": [ + "list" + ] + } + } + } + } + }, + "Disks": { + "clients": { + "rest": { + "libraryClient": "DisksClient", + "rpcs": { + "AddResourcePolicies": { + "methods": [ + "add_resource_policies" + ] + }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "BulkInsert": { + "methods": [ + "bulk_insert" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "RemoveResourcePolicies": { + "methods": [ + "remove_resource_policies" + ] + }, + "Resize": { + "methods": [ + "resize" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "StartAsyncReplication": { + "methods": [ + "start_async_replication" + ] + }, + "StopAsyncReplication": { + "methods": [ + "stop_async_replication" + ] + }, + "StopGroupAsyncReplication": { + "methods": [ + "stop_group_async_replication" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "ExternalVpnGateways": { + "clients": { + "rest": { + "libraryClient": "ExternalVpnGatewaysClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "FirewallPolicies": { + "clients": { + "rest": { + "libraryClient": "FirewallPoliciesClient", + "rpcs": { + "AddAssociation": { + "methods": [ + "add_association" + ] + }, + "AddRule": { + "methods": [ + "add_rule" + ] + }, + "CloneRules": { + "methods": [ 
+ "clone_rules" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetAssociation": { + "methods": [ + "get_association" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetRule": { + "methods": [ + "get_rule" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListAssociations": { + "methods": [ + "list_associations" + ] + }, + "Move": { + "methods": [ + "move" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "PatchRule": { + "methods": [ + "patch_rule" + ] + }, + "RemoveAssociation": { + "methods": [ + "remove_association" + ] + }, + "RemoveRule": { + "methods": [ + "remove_rule" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "Firewalls": { + "clients": { + "rest": { + "libraryClient": "FirewallsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "ForwardingRules": { + "clients": { + "rest": { + "libraryClient": "ForwardingRulesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "SetTarget": { + "methods": [ + "set_target" + ] + } + } + } + } + }, + "GlobalAddresses": { + "clients": { + "rest": { + "libraryClient": "GlobalAddressesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ 
+ "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Move": { + "methods": [ + "move" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + } + } + } + } + }, + "GlobalForwardingRules": { + "clients": { + "rest": { + "libraryClient": "GlobalForwardingRulesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "SetTarget": { + "methods": [ + "set_target" + ] + } + } + } + } + }, + "GlobalNetworkEndpointGroups": { + "clients": { + "rest": { + "libraryClient": "GlobalNetworkEndpointGroupsClient", + "rpcs": { + "AttachNetworkEndpoints": { + "methods": [ + "attach_network_endpoints" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "DetachNetworkEndpoints": { + "methods": [ + "detach_network_endpoints" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListNetworkEndpoints": { + "methods": [ + "list_network_endpoints" + ] + } + } + } + } + }, + "GlobalOperations": { + "clients": { + "rest": { + "libraryClient": "GlobalOperationsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Wait": { + "methods": [ + "wait" + ] + } + } + } + } + }, + "GlobalOrganizationOperations": { + "clients": { + "rest": { + "libraryClient": "GlobalOrganizationOperationsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "GlobalPublicDelegatedPrefixes": { + "clients": { + 
"rest": { + "libraryClient": "GlobalPublicDelegatedPrefixesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "HealthChecks": { + "clients": { + "rest": { + "libraryClient": "HealthChecksClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "ImageFamilyViews": { + "clients": { + "rest": { + "libraryClient": "ImageFamilyViewsClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + } + } + } + } + }, + "Images": { + "clients": { + "rest": { + "libraryClient": "ImagesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Deprecate": { + "methods": [ + "deprecate" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetFromFamily": { + "methods": [ + "get_from_family" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "InstanceGroupManagers": { + "clients": { + "rest": { + "libraryClient": "InstanceGroupManagersClient", + "rpcs": { + "AbandonInstances": { + "methods": [ + "abandon_instances" + ] + }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "ApplyUpdatesToInstances": { + "methods": [ + 
"apply_updates_to_instances" + ] + }, + "CreateInstances": { + "methods": [ + "create_instances" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "DeleteInstances": { + "methods": [ + "delete_instances" + ] + }, + "DeletePerInstanceConfigs": { + "methods": [ + "delete_per_instance_configs" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListErrors": { + "methods": [ + "list_errors" + ] + }, + "ListManagedInstances": { + "methods": [ + "list_managed_instances" + ] + }, + "ListPerInstanceConfigs": { + "methods": [ + "list_per_instance_configs" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "PatchPerInstanceConfigs": { + "methods": [ + "patch_per_instance_configs" + ] + }, + "RecreateInstances": { + "methods": [ + "recreate_instances" + ] + }, + "Resize": { + "methods": [ + "resize" + ] + }, + "SetInstanceTemplate": { + "methods": [ + "set_instance_template" + ] + }, + "SetTargetPools": { + "methods": [ + "set_target_pools" + ] + }, + "UpdatePerInstanceConfigs": { + "methods": [ + "update_per_instance_configs" + ] + } + } + } + } + }, + "InstanceGroups": { + "clients": { + "rest": { + "libraryClient": "InstanceGroupsClient", + "rpcs": { + "AddInstances": { + "methods": [ + "add_instances" + ] + }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "RemoveInstances": { + "methods": [ + "remove_instances" + ] + }, + "SetNamedPorts": { + "methods": [ + "set_named_ports" + ] + } + } + } + } + }, + "InstanceTemplates": { + "clients": { + "rest": { + "libraryClient": "InstanceTemplatesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + 
"methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "Instances": { + "clients": { + "rest": { + "libraryClient": "InstancesClient", + "rpcs": { + "AddAccessConfig": { + "methods": [ + "add_access_config" + ] + }, + "AddResourcePolicies": { + "methods": [ + "add_resource_policies" + ] + }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "AttachDisk": { + "methods": [ + "attach_disk" + ] + }, + "BulkInsert": { + "methods": [ + "bulk_insert" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "DeleteAccessConfig": { + "methods": [ + "delete_access_config" + ] + }, + "DetachDisk": { + "methods": [ + "detach_disk" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetEffectiveFirewalls": { + "methods": [ + "get_effective_firewalls" + ] + }, + "GetGuestAttributes": { + "methods": [ + "get_guest_attributes" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetScreenshot": { + "methods": [ + "get_screenshot" + ] + }, + "GetSerialPortOutput": { + "methods": [ + "get_serial_port_output" + ] + }, + "GetShieldedInstanceIdentity": { + "methods": [ + "get_shielded_instance_identity" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListReferrers": { + "methods": [ + "list_referrers" + ] + }, + "RemoveResourcePolicies": { + "methods": [ + "remove_resource_policies" + ] + }, + "Reset": { + "methods": [ + "reset" + ] + }, + "Resume": { + "methods": [ + "resume" + ] + }, + "SendDiagnosticInterrupt": { + "methods": [ + "send_diagnostic_interrupt" + ] + }, + "SetDeletionProtection": { + "methods": [ + "set_deletion_protection" + ] + }, + 
"SetDiskAutoDelete": { + "methods": [ + "set_disk_auto_delete" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "SetMachineResources": { + "methods": [ + "set_machine_resources" + ] + }, + "SetMachineType": { + "methods": [ + "set_machine_type" + ] + }, + "SetMetadata": { + "methods": [ + "set_metadata" + ] + }, + "SetMinCpuPlatform": { + "methods": [ + "set_min_cpu_platform" + ] + }, + "SetName": { + "methods": [ + "set_name" + ] + }, + "SetScheduling": { + "methods": [ + "set_scheduling" + ] + }, + "SetServiceAccount": { + "methods": [ + "set_service_account" + ] + }, + "SetShieldedInstanceIntegrityPolicy": { + "methods": [ + "set_shielded_instance_integrity_policy" + ] + }, + "SetTags": { + "methods": [ + "set_tags" + ] + }, + "SimulateMaintenanceEvent": { + "methods": [ + "simulate_maintenance_event" + ] + }, + "Start": { + "methods": [ + "start" + ] + }, + "StartWithEncryptionKey": { + "methods": [ + "start_with_encryption_key" + ] + }, + "Stop": { + "methods": [ + "stop" + ] + }, + "Suspend": { + "methods": [ + "suspend" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "Update": { + "methods": [ + "update" + ] + }, + "UpdateAccessConfig": { + "methods": [ + "update_access_config" + ] + }, + "UpdateDisplayDevice": { + "methods": [ + "update_display_device" + ] + }, + "UpdateNetworkInterface": { + "methods": [ + "update_network_interface" + ] + }, + "UpdateShieldedInstanceConfig": { + "methods": [ + "update_shielded_instance_config" + ] + } + } + } + } + }, + "InterconnectAttachments": { + "clients": { + "rest": { + "libraryClient": "InterconnectAttachmentsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + 
"methods": [ + "patch" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + } + } + } + } + }, + "InterconnectLocations": { + "clients": { + "rest": { + "libraryClient": "InterconnectLocationsClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "InterconnectRemoteLocations": { + "clients": { + "rest": { + "libraryClient": "InterconnectRemoteLocationsClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "Interconnects": { + "clients": { + "rest": { + "libraryClient": "InterconnectsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetDiagnostics": { + "methods": [ + "get_diagnostics" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + } + } + } + } + }, + "LicenseCodes": { + "clients": { + "rest": { + "libraryClient": "LicenseCodesClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "Licenses": { + "clients": { + "rest": { + "libraryClient": "LicensesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "MachineImages": { + "clients": { + "rest": { + "libraryClient": "MachineImagesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + 
"get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "MachineTypes": { + "clients": { + "rest": { + "libraryClient": "MachineTypesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "NetworkAttachments": { + "clients": { + "rest": { + "libraryClient": "NetworkAttachmentsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "NetworkEdgeSecurityServices": { + "clients": { + "rest": { + "libraryClient": "NetworkEdgeSecurityServicesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "NetworkEndpointGroups": { + "clients": { + "rest": { + "libraryClient": "NetworkEndpointGroupsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "AttachNetworkEndpoints": { + "methods": [ + "attach_network_endpoints" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "DetachNetworkEndpoints": { + "methods": [ + "detach_network_endpoints" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + 
"insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListNetworkEndpoints": { + "methods": [ + "list_network_endpoints" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "NetworkFirewallPolicies": { + "clients": { + "rest": { + "libraryClient": "NetworkFirewallPoliciesClient", + "rpcs": { + "AddAssociation": { + "methods": [ + "add_association" + ] + }, + "AddRule": { + "methods": [ + "add_rule" + ] + }, + "CloneRules": { + "methods": [ + "clone_rules" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetAssociation": { + "methods": [ + "get_association" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetRule": { + "methods": [ + "get_rule" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "PatchRule": { + "methods": [ + "patch_rule" + ] + }, + "RemoveAssociation": { + "methods": [ + "remove_association" + ] + }, + "RemoveRule": { + "methods": [ + "remove_rule" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "Networks": { + "clients": { + "rest": { + "libraryClient": "NetworksClient", + "rpcs": { + "AddPeering": { + "methods": [ + "add_peering" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetEffectiveFirewalls": { + "methods": [ + "get_effective_firewalls" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListPeeringRoutes": { + "methods": [ + "list_peering_routes" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "RemovePeering": { + "methods": [ + "remove_peering" + ] + }, + "SwitchToCustomMode": { + "methods": [ + "switch_to_custom_mode" + ] + }, + "UpdatePeering": { + "methods": [ + 
"update_peering" + ] + } + } + } + } + }, + "NodeGroups": { + "clients": { + "rest": { + "libraryClient": "NodeGroupsClient", + "rpcs": { + "AddNodes": { + "methods": [ + "add_nodes" + ] + }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "DeleteNodes": { + "methods": [ + "delete_nodes" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListNodes": { + "methods": [ + "list_nodes" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetNodeTemplate": { + "methods": [ + "set_node_template" + ] + }, + "SimulateMaintenanceEvent": { + "methods": [ + "simulate_maintenance_event" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "NodeTemplates": { + "clients": { + "rest": { + "libraryClient": "NodeTemplatesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "NodeTypes": { + "clients": { + "rest": { + "libraryClient": "NodeTypesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "PacketMirrorings": { + "clients": { + "rest": { + "libraryClient": "PacketMirroringsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + 
"Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "Projects": { + "clients": { + "rest": { + "libraryClient": "ProjectsClient", + "rpcs": { + "DisableXpnHost": { + "methods": [ + "disable_xpn_host" + ] + }, + "DisableXpnResource": { + "methods": [ + "disable_xpn_resource" + ] + }, + "EnableXpnHost": { + "methods": [ + "enable_xpn_host" + ] + }, + "EnableXpnResource": { + "methods": [ + "enable_xpn_resource" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetXpnHost": { + "methods": [ + "get_xpn_host" + ] + }, + "GetXpnResources": { + "methods": [ + "get_xpn_resources" + ] + }, + "ListXpnHosts": { + "methods": [ + "list_xpn_hosts" + ] + }, + "MoveDisk": { + "methods": [ + "move_disk" + ] + }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, + "SetCommonInstanceMetadata": { + "methods": [ + "set_common_instance_metadata" + ] + }, + "SetDefaultNetworkTier": { + "methods": [ + "set_default_network_tier" + ] + }, + "SetUsageExportBucket": { + "methods": [ + "set_usage_export_bucket" + ] + } + } + } + } + }, + "PublicAdvertisedPrefixes": { + "clients": { + "rest": { + "libraryClient": "PublicAdvertisedPrefixesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "PublicDelegatedPrefixes": { + "clients": { + "rest": { + "libraryClient": "PublicDelegatedPrefixesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + 
"insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "RegionAutoscalers": { + "clients": { + "rest": { + "libraryClient": "RegionAutoscalersClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "RegionBackendServices": { + "clients": { + "rest": { + "libraryClient": "RegionBackendServicesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetHealth": { + "methods": [ + "get_health" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "RegionCommitments": { + "clients": { + "rest": { + "libraryClient": "RegionCommitmentsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "RegionDiskTypes": { + "clients": { + "rest": { + "libraryClient": "RegionDiskTypesClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "RegionDisks": { + "clients": { + "rest": { + "libraryClient": "RegionDisksClient", + "rpcs": { + "AddResourcePolicies": { + "methods": [ + "add_resource_policies" + ] + }, + "BulkInsert": { + "methods": [ + "bulk_insert" + ] + }, + "CreateSnapshot": { + "methods": [ + 
"create_snapshot" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "RemoveResourcePolicies": { + "methods": [ + "remove_resource_policies" + ] + }, + "Resize": { + "methods": [ + "resize" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "StartAsyncReplication": { + "methods": [ + "start_async_replication" + ] + }, + "StopAsyncReplication": { + "methods": [ + "stop_async_replication" + ] + }, + "StopGroupAsyncReplication": { + "methods": [ + "stop_group_async_replication" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "RegionHealthCheckServices": { + "clients": { + "rest": { + "libraryClient": "RegionHealthCheckServicesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "RegionHealthChecks": { + "clients": { + "rest": { + "libraryClient": "RegionHealthChecksClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "RegionInstanceGroupManagers": { + "clients": { + "rest": { + "libraryClient": "RegionInstanceGroupManagersClient", + "rpcs": { + "AbandonInstances": { + "methods": [ + "abandon_instances" + ] + }, + "ApplyUpdatesToInstances": { + "methods": [ + "apply_updates_to_instances" + ] + }, + 
"CreateInstances": { + "methods": [ + "create_instances" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "DeleteInstances": { + "methods": [ + "delete_instances" + ] + }, + "DeletePerInstanceConfigs": { + "methods": [ + "delete_per_instance_configs" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListErrors": { + "methods": [ + "list_errors" + ] + }, + "ListManagedInstances": { + "methods": [ + "list_managed_instances" + ] + }, + "ListPerInstanceConfigs": { + "methods": [ + "list_per_instance_configs" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "PatchPerInstanceConfigs": { + "methods": [ + "patch_per_instance_configs" + ] + }, + "RecreateInstances": { + "methods": [ + "recreate_instances" + ] + }, + "Resize": { + "methods": [ + "resize" + ] + }, + "SetInstanceTemplate": { + "methods": [ + "set_instance_template" + ] + }, + "SetTargetPools": { + "methods": [ + "set_target_pools" + ] + }, + "UpdatePerInstanceConfigs": { + "methods": [ + "update_per_instance_configs" + ] + } + } + } + } + }, + "RegionInstanceGroups": { + "clients": { + "rest": { + "libraryClient": "RegionInstanceGroupsClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "SetNamedPorts": { + "methods": [ + "set_named_ports" + ] + } + } + } + } + }, + "RegionInstanceTemplates": { + "clients": { + "rest": { + "libraryClient": "RegionInstanceTemplatesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "RegionInstances": { + "clients": { + "rest": { + "libraryClient": "RegionInstancesClient", + "rpcs": { + "BulkInsert": { + "methods": [ + "bulk_insert" + ] + } + } + } + } + }, + 
"RegionNetworkEndpointGroups": { + "clients": { + "rest": { + "libraryClient": "RegionNetworkEndpointGroupsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "RegionNetworkFirewallPolicies": { + "clients": { + "rest": { + "libraryClient": "RegionNetworkFirewallPoliciesClient", + "rpcs": { + "AddAssociation": { + "methods": [ + "add_association" + ] + }, + "AddRule": { + "methods": [ + "add_rule" + ] + }, + "CloneRules": { + "methods": [ + "clone_rules" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetAssociation": { + "methods": [ + "get_association" + ] + }, + "GetEffectiveFirewalls": { + "methods": [ + "get_effective_firewalls" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetRule": { + "methods": [ + "get_rule" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "PatchRule": { + "methods": [ + "patch_rule" + ] + }, + "RemoveAssociation": { + "methods": [ + "remove_association" + ] + }, + "RemoveRule": { + "methods": [ + "remove_rule" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "RegionNotificationEndpoints": { + "clients": { + "rest": { + "libraryClient": "RegionNotificationEndpointsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "RegionOperations": { + "clients": { + "rest": { + "libraryClient": "RegionOperationsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + 
] + }, + "List": { + "methods": [ + "list" + ] + }, + "Wait": { + "methods": [ + "wait" + ] + } + } + } + } + }, + "RegionSecurityPolicies": { + "clients": { + "rest": { + "libraryClient": "RegionSecurityPoliciesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "RegionSslCertificates": { + "clients": { + "rest": { + "libraryClient": "RegionSslCertificatesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "RegionSslPolicies": { + "clients": { + "rest": { + "libraryClient": "RegionSslPoliciesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListAvailableFeatures": { + "methods": [ + "list_available_features" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "RegionTargetHttpProxies": { + "clients": { + "rest": { + "libraryClient": "RegionTargetHttpProxiesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetUrlMap": { + "methods": [ + "set_url_map" + ] + } + } + } + } + }, + "RegionTargetHttpsProxies": { + "clients": { + "rest": { + "libraryClient": "RegionTargetHttpsProxiesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + 
"SetSslCertificates": { + "methods": [ + "set_ssl_certificates" + ] + }, + "SetUrlMap": { + "methods": [ + "set_url_map" + ] + } + } + } + } + }, + "RegionTargetTcpProxies": { + "clients": { + "rest": { + "libraryClient": "RegionTargetTcpProxiesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "RegionUrlMaps": { + "clients": { + "rest": { + "libraryClient": "RegionUrlMapsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "Update": { + "methods": [ + "update" + ] + }, + "Validate": { + "methods": [ + "validate" + ] + } + } + } + } + }, + "Regions": { + "clients": { + "rest": { + "libraryClient": "RegionsClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "Reservations": { + "clients": { + "rest": { + "libraryClient": "ReservationsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Resize": { + "methods": [ + "resize" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "ResourcePolicies": { + "clients": { + "rest": { + "libraryClient": "ResourcePoliciesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] 
+ }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "Routers": { + "clients": { + "rest": { + "libraryClient": "RoutersClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetNatMappingInfo": { + "methods": [ + "get_nat_mapping_info" + ] + }, + "GetRouterStatus": { + "methods": [ + "get_router_status" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "Preview": { + "methods": [ + "preview" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, + "Routes": { + "clients": { + "rest": { + "libraryClient": "RoutesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "SecurityPolicies": { + "clients": { + "rest": { + "libraryClient": "SecurityPoliciesClient", + "rpcs": { + "AddRule": { + "methods": [ + "add_rule" + ] + }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetRule": { + "methods": [ + "get_rule" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListPreconfiguredExpressionSets": { + "methods": [ + "list_preconfigured_expression_sets" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "PatchRule": { + "methods": [ + 
"patch_rule" + ] + }, + "RemoveRule": { + "methods": [ + "remove_rule" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + } + } + } + } + }, + "ServiceAttachments": { + "clients": { + "rest": { + "libraryClient": "ServiceAttachmentsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "Snapshots": { + "clients": { + "rest": { + "libraryClient": "SnapshotsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "SslCertificates": { + "clients": { + "rest": { + "libraryClient": "SslCertificatesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "SslPolicies": { + "clients": { + "rest": { + "libraryClient": "SslPoliciesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + 
"insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListAvailableFeatures": { + "methods": [ + "list_available_features" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "Subnetworks": { + "clients": { + "rest": { + "libraryClient": "SubnetworksClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "ExpandIpCidrRange": { + "methods": [ + "expand_ip_cidr_range" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListUsable": { + "methods": [ + "list_usable" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetPrivateIpGoogleAccess": { + "methods": [ + "set_private_ip_google_access" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "TargetGrpcProxies": { + "clients": { + "rest": { + "libraryClient": "TargetGrpcProxiesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "TargetHttpProxies": { + "clients": { + "rest": { + "libraryClient": "TargetHttpProxiesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetUrlMap": { + "methods": [ + "set_url_map" + ] + } + } + } + } + }, + "TargetHttpsProxies": { + "clients": { + "rest": { + "libraryClient": "TargetHttpsProxiesClient", + "rpcs": { + 
"AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetCertificateMap": { + "methods": [ + "set_certificate_map" + ] + }, + "SetQuicOverride": { + "methods": [ + "set_quic_override" + ] + }, + "SetSslCertificates": { + "methods": [ + "set_ssl_certificates" + ] + }, + "SetSslPolicy": { + "methods": [ + "set_ssl_policy" + ] + }, + "SetUrlMap": { + "methods": [ + "set_url_map" + ] + } + } + } + } + }, + "TargetInstances": { + "clients": { + "rest": { + "libraryClient": "TargetInstancesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "TargetPools": { + "clients": { + "rest": { + "libraryClient": "TargetPoolsClient", + "rpcs": { + "AddHealthCheck": { + "methods": [ + "add_health_check" + ] + }, + "AddInstance": { + "methods": [ + "add_instance" + ] + }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetHealth": { + "methods": [ + "get_health" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "RemoveHealthCheck": { + "methods": [ + "remove_health_check" + ] + }, + "RemoveInstance": { + "methods": [ + "remove_instance" + ] + }, + "SetBackup": { + "methods": [ + "set_backup" + ] + } + } + } + } + }, + "TargetSslProxies": { + "clients": { + "rest": { + "libraryClient": "TargetSslProxiesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + 
"insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetBackendService": { + "methods": [ + "set_backend_service" + ] + }, + "SetCertificateMap": { + "methods": [ + "set_certificate_map" + ] + }, + "SetProxyHeader": { + "methods": [ + "set_proxy_header" + ] + }, + "SetSslCertificates": { + "methods": [ + "set_ssl_certificates" + ] + }, + "SetSslPolicy": { + "methods": [ + "set_ssl_policy" + ] + } + } + } + } + }, + "TargetTcpProxies": { + "clients": { + "rest": { + "libraryClient": "TargetTcpProxiesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetBackendService": { + "methods": [ + "set_backend_service" + ] + }, + "SetProxyHeader": { + "methods": [ + "set_proxy_header" + ] + } + } + } + } + }, + "TargetVpnGateways": { + "clients": { + "rest": { + "libraryClient": "TargetVpnGatewaysClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + } + } + } + } + }, + "UrlMaps": { + "clients": { + "rest": { + "libraryClient": "UrlMapsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "InvalidateCache": { + "methods": [ + "invalidate_cache" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "Update": { + "methods": [ + "update" + ] + }, + "Validate": { + "methods": [ + "validate" + ] + } + } + } + } + }, + "VpnGateways": { + "clients": { + "rest": 
{ + "libraryClient": "VpnGatewaysClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetStatus": { + "methods": [ + "get_status" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, + "VpnTunnels": { + "clients": { + "rest": { + "libraryClient": "VpnTunnelsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + } + } + } + } + }, + "ZoneOperations": { + "clients": { + "rest": { + "libraryClient": "ZoneOperationsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Wait": { + "methods": [ + "wait" + ] + } + } + } + } + }, + "Zones": { + "clients": { + "rest": { + "libraryClient": "ZonesClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/compute_v1/gapic_version.py new file mode 100644 index 000000000..360a0d13e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/py.typed b/owl-bot-staging/v1/google/cloud/compute_v1/py.typed new file mode 100644 index 000000000..071da5269 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-compute package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/__init__.py new file mode 100644 index 000000000..89a37dc92 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/__init__.py new file mode 100644 index 000000000..3e0d818df --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import AcceleratorTypesClient + +__all__ = ( + 'AcceleratorTypesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/client.py new file mode 100644 index 000000000..1630e5d85 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/client.py @@ -0,0 +1,760 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.accelerator_types import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import AcceleratorTypesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import AcceleratorTypesRestTransport + + +class AcceleratorTypesClientMeta(type): + """Metaclass for the AcceleratorTypes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[AcceleratorTypesTransport]] + _transport_registry["rest"] = AcceleratorTypesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[AcceleratorTypesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AcceleratorTypesClient(metaclass=AcceleratorTypesClientMeta): + """Services + + The AcceleratorTypes API. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AcceleratorTypesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AcceleratorTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AcceleratorTypesTransport: + """Returns the transport used by the client instance. + + Returns: + AcceleratorTypesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AcceleratorTypesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the accelerator types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AcceleratorTypesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AcceleratorTypesTransport): + # transport is a AcceleratorTypesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListAcceleratorTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of accelerator types. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.AcceleratorTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListAcceleratorTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListAcceleratorTypesRequest, dict]): + The request object. A request message for + AcceleratorTypes.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.accelerator_types.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListAcceleratorTypesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListAcceleratorTypesRequest): + request = compute.AggregatedListAcceleratorTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetAcceleratorTypeRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + accelerator_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.AcceleratorType: + r"""Returns the specified accelerator type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.AcceleratorTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetAcceleratorTypeRequest( + accelerator_type="accelerator_type_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetAcceleratorTypeRequest, dict]): + The request object. A request message for + AcceleratorTypes.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + accelerator_type (str): + Name of the accelerator type to + return. + + This corresponds to the ``accelerator_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.AcceleratorType: + Represents an Accelerator Type + resource. Google Cloud Platform provides + graphics processing units (accelerators) + that you can add to VM instances to + improve or accelerate performance when + working with intensive workloads. For + more information, read GPUs on Compute + Engine. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, accelerator_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetAcceleratorTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetAcceleratorTypeRequest): + request = compute.GetAcceleratorTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if accelerator_type is not None: + request.accelerator_type = accelerator_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("accelerator_type", request.accelerator_type), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListAcceleratorTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of accelerator types that are + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.AcceleratorTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListAcceleratorTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListAcceleratorTypesRequest, dict]): + The request object. A request message for + AcceleratorTypes.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.accelerator_types.pagers.ListPager: + Contains a list of accelerator types. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListAcceleratorTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListAcceleratorTypesRequest): + request = compute.ListAcceleratorTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AcceleratorTypesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "AcceleratorTypesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/pagers.py new file mode 100644 index 000000000..332687389 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.AcceleratorTypeAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.AcceleratorTypeAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.AcceleratorTypeAggregatedList], + request: compute.AggregatedListAcceleratorTypesRequest, + response: compute.AcceleratorTypeAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListAcceleratorTypesRequest): + The initial request object. + response (google.cloud.compute_v1.types.AcceleratorTypeAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListAcceleratorTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.AcceleratorTypeAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.AcceleratorTypesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.AcceleratorTypesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.AcceleratorTypeList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.AcceleratorTypeList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.AcceleratorTypeList], + request: compute.ListAcceleratorTypesRequest, + response: compute.AcceleratorTypeList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListAcceleratorTypesRequest): + The initial request object. + response (google.cloud.compute_v1.types.AcceleratorTypeList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListAcceleratorTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.AcceleratorTypeList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.AcceleratorType]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/__init__.py new file mode 100644 index 000000000..c8c9ef738 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AcceleratorTypesTransport +from .rest import AcceleratorTypesRestTransport +from .rest import AcceleratorTypesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AcceleratorTypesTransport]] +_transport_registry['rest'] = AcceleratorTypesRestTransport + +__all__ = ( + 'AcceleratorTypesTransport', + 'AcceleratorTypesRestTransport', + 'AcceleratorTypesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/base.py new file mode 100644 index 000000000..c4534ef15 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/base.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class AcceleratorTypesTransport(abc.ABC): + """Abstract transport class for AcceleratorTypes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListAcceleratorTypesRequest], + Union[ + compute.AcceleratorTypeAggregatedList, + Awaitable[compute.AcceleratorTypeAggregatedList] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetAcceleratorTypeRequest], + Union[ + compute.AcceleratorType, + Awaitable[compute.AcceleratorType] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListAcceleratorTypesRequest], + Union[ + compute.AcceleratorTypeList, + Awaitable[compute.AcceleratorTypeList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'AcceleratorTypesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/rest.py new file mode 100644 index 000000000..8f653f6f3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/accelerator_types/transports/rest.py @@ -0,0 +1,525 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import AcceleratorTypesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AcceleratorTypesRestInterceptor: + """Interceptor for AcceleratorTypes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AcceleratorTypesRestTransport. + + .. code-block:: python + class MyCustomAcceleratorTypesInterceptor(AcceleratorTypesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AcceleratorTypesRestTransport(interceptor=MyCustomAcceleratorTypesInterceptor()) + client = AcceleratorTypesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListAcceleratorTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListAcceleratorTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the AcceleratorTypes server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.AcceleratorTypeAggregatedList) -> compute.AcceleratorTypeAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the AcceleratorTypes server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetAcceleratorTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetAcceleratorTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the AcceleratorTypes server. + """ + return request, metadata + + def post_get(self, response: compute.AcceleratorType) -> compute.AcceleratorType: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the AcceleratorTypes server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListAcceleratorTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListAcceleratorTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the AcceleratorTypes server. + """ + return request, metadata + + def post_list(self, response: compute.AcceleratorTypeList) -> compute.AcceleratorTypeList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the AcceleratorTypes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AcceleratorTypesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AcceleratorTypesRestInterceptor + + +class AcceleratorTypesRestTransport(AcceleratorTypesTransport): + """REST backend transport for AcceleratorTypes. + + Services + + The AcceleratorTypes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[AcceleratorTypesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AcceleratorTypesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(AcceleratorTypesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListAcceleratorTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.AcceleratorTypeAggregatedList: + r"""Call the aggregated list method over HTTP. 
+ + Args: + request (~.compute.AggregatedListAcceleratorTypesRequest): + The request object. A request message for + AcceleratorTypes.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.AcceleratorTypeAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/acceleratorTypes', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListAcceleratorTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.AcceleratorTypeAggregatedList() + pb_resp = compute.AcceleratorTypeAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Get(AcceleratorTypesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetAcceleratorTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.AcceleratorType: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetAcceleratorTypeRequest): + The request object. A request message for + AcceleratorTypes.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.AcceleratorType: + Represents an Accelerator Type + resource. Google Cloud Platform provides + graphics processing units (accelerators) + that you can add to VM instances to + improve or accelerate performance when + working with intensive workloads. For + more information, read GPUs on Compute + Engine. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes/{accelerator_type}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetAcceleratorTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.AcceleratorType() + pb_resp = compute.AcceleratorType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(AcceleratorTypesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListAcceleratorTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.AcceleratorTypeList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListAcceleratorTypesRequest): + The request object. A request message for + AcceleratorTypes.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.AcceleratorTypeList: + Contains a list of accelerator types. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListAcceleratorTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.AcceleratorTypeList() + pb_resp = compute.AcceleratorTypeList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListAcceleratorTypesRequest], + compute.AcceleratorTypeAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetAcceleratorTypeRequest], + compute.AcceleratorType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListAcceleratorTypesRequest], + compute.AcceleratorTypeList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'AcceleratorTypesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/__init__.py new file mode 100644 index 000000000..b8d8a8c6f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import AddressesClient + +__all__ = ( + 'AddressesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/client.py new file mode 100644 index 000000000..939213e5e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/client.py @@ -0,0 +1,1855 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.addresses import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import AddressesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import AddressesRestTransport + + +class AddressesClientMeta(type): + """Metaclass for the Addresses client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[AddressesTransport]] + _transport_registry["rest"] = AddressesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[AddressesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+ + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AddressesClient(metaclass=AddressesClientMeta): + """The Addresses API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AddressesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AddressesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AddressesTransport: + """Returns the transport used by the client instance. + + Returns: + AddressesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + 
return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AddressesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the addresses client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AddressesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AddressesTransport): + # transport is a AddressesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListAddressesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of addresses. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListAddressesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListAddressesRequest, dict]): + The request object. A request message for + Addresses.AggregatedList. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.addresses.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListAddressesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListAddressesRequest): + request = compute.AggregatedListAddressesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + address: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified address resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAddressRequest( + address="address_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteAddressRequest, dict]): + The request object. A request message for + Addresses.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to + delete. + + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, address]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteAddressRequest): + request = compute.DeleteAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if address is not None: + request.address = address + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + address: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified address resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAddressRequest( + address="address_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteAddressRequest, dict]): + The request object. A request message for + Addresses.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to + delete. + + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, address]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteAddressRequest): + request = compute.DeleteAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if address is not None: + request.address = address + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + address: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Address: + r"""Returns the specified address resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.GetAddressRequest( + address="address_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetAddressRequest, dict]): + The request object. A request message for Addresses.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to + return. + + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Address: + Represents an IP Address resource. Google Compute Engine + has two IP Address resources: \* [Global (external and + internal)](\ https://cloud.google.com/compute/docs/reference/rest/v1/globalAddresses) + \* [Regional (external and + internal)](\ https://cloud.google.com/compute/docs/reference/rest/v1/addresses) + For more information, see Reserving a static external IP + address. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, address]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetAddressRequest): + request = compute.GetAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if address is not None: + request.address = address + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + address_resource: Optional[compute.Address] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an address resource in the specified project + by using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.InsertAddressRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertAddressRequest, dict]): + The request object. A request message for + Addresses.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ address_resource (google.cloud.compute_v1.types.Address): + The body resource for this request + This corresponds to the ``address_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, address_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertAddressRequest): + request = compute.InsertAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if address_resource is not None: + request.address_resource = address_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + address_resource: Optional[compute.Address] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an address resource in the specified project + by using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.InsertAddressRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertAddressRequest, dict]): + The request object. A request message for + Addresses.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address_resource (google.cloud.compute_v1.types.Address): + The body resource for this request + This corresponds to the ``address_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, address_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertAddressRequest): + request = compute.InsertAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if address_resource is not None: + request.address_resource = address_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListAddressesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of addresses contained within the + specified region. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.ListAddressesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListAddressesRequest, dict]): + The request object. A request message for Addresses.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.addresses.pagers.ListPager: + Contains a list of addresses. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListAddressesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListAddressesRequest): + request = compute.ListAddressesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def move_unary(self, + request: Optional[Union[compute.MoveAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + address: Optional[str] = None, + region_addresses_move_request_resource: Optional[compute.RegionAddressesMoveRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Moves the specified address resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.MoveAddressRequest( + address="address_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveAddressRequest, dict]): + The request object. A request message for Addresses.Move. + See the method description for details. + project (str): + Source project ID which the Address + is moved from. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to move. 
+ This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_addresses_move_request_resource (google.cloud.compute_v1.types.RegionAddressesMoveRequest): + The body resource for this request + This corresponds to the ``region_addresses_move_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, address, region_addresses_move_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.MoveAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.MoveAddressRequest): + request = compute.MoveAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if address is not None: + request.address = address + if region_addresses_move_request_resource is not None: + request.region_addresses_move_request_resource = region_addresses_move_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def move(self, + request: Optional[Union[compute.MoveAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + address: Optional[str] = None, + region_addresses_move_request_resource: Optional[compute.RegionAddressesMoveRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Moves the specified address resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.MoveAddressRequest( + address="address_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveAddressRequest, dict]): + The request object. A request message for Addresses.Move. + See the method description for details. + project (str): + Source project ID which the Address + is moved from. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to move. + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_addresses_move_request_resource (google.cloud.compute_v1.types.RegionAddressesMoveRequest): + The body resource for this request + This corresponds to the ``region_addresses_move_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, address, region_addresses_move_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.MoveAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.MoveAddressRequest): + request = compute.MoveAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if address is not None: + request.address = address + if region_addresses_move_request_resource is not None: + request.region_addresses_move_request_resource = region_addresses_move_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("address", request.address), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on an Address. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsAddressRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsAddressRequest, dict]): + The request object. A request message for + Addresses.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsAddressRequest): + request = compute.SetLabelsAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on an Address. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsAddressRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsAddressRequest, dict]): + The request object. A request message for + Addresses.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. 
+ This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsAddressRequest): + request = compute.SetLabelsAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "AddressesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "AddressesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/pagers.py new file mode 100644 index 000000000..75bf40263 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.AddressAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.AddressAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.AddressAggregatedList], + request: compute.AggregatedListAddressesRequest, + response: compute.AddressAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListAddressesRequest): + The initial request object. + response (google.cloud.compute_v1.types.AddressAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListAddressesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.AddressAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.AddressesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.AddressesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.AddressList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.AddressList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.AddressList], + request: compute.ListAddressesRequest, + response: compute.AddressList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListAddressesRequest): + The initial request object. 
+ response (google.cloud.compute_v1.types.AddressList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListAddressesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.AddressList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Address]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/__init__.py new file mode 100644 index 000000000..c9fc8fd9e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AddressesTransport +from .rest import AddressesRestTransport +from .rest import AddressesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[AddressesTransport]] +_transport_registry['rest'] = AddressesRestTransport + +__all__ = ( + 'AddressesTransport', + 'AddressesRestTransport', + 'AddressesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/base.py new file mode 100644 index 000000000..29f87b02e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class AddressesTransport(abc.ABC): + """Abstract transport class for Addresses.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.move: gapic_v1.method.wrap_method( + self.move, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListAddressesRequest], + Union[ + compute.AddressAggregatedList, + Awaitable[compute.AddressAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteAddressRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetAddressRequest], + Union[ + compute.Address, + Awaitable[compute.Address] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertAddressRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListAddressesRequest], + Union[ + compute.AddressList, + Awaitable[compute.AddressList] + ]]: + raise NotImplementedError() + + @property + def move(self) -> Callable[ + [compute.MoveAddressRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsAddressRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'AddressesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/rest.py 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/rest.py new file mode 100644 index 000000000..2052bdbbb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/addresses/transports/rest.py @@ -0,0 +1,1046 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import AddressesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AddressesRestInterceptor: + """Interceptor for Addresses. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AddressesRestTransport. + + .. code-block:: python + class MyCustomAddressesInterceptor(AddressesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_move(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_move(self, response): + logging.log(f"Received 
response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AddressesRestTransport(interceptor=MyCustomAddressesInterceptor()) + client = AddressesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListAddressesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListAddressesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Addresses server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.AddressAggregatedList) -> compute.AddressAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the Addresses server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Addresses server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Addresses server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Addresses server. + """ + return request, metadata + + def post_get(self, response: compute.Address) -> compute.Address: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Addresses server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Addresses server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Addresses server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListAddressesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListAddressesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Addresses server. + """ + return request, metadata + + def post_list(self, response: compute.AddressList) -> compute.AddressList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Addresses server but before + it is returned to user code. 
+ """ + return response + def pre_move(self, request: compute.MoveAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.MoveAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for move + + Override in a subclass to manipulate the request or metadata + before they are sent to the Addresses server. + """ + return request, metadata + + def post_move(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for move + + Override in a subclass to manipulate the response + after it is returned by the Addresses server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the Addresses server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the Addresses server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AddressesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AddressesRestInterceptor + + +class AddressesRestTransport(AddressesTransport): + """REST backend transport for Addresses. + + The Addresses API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[AddressesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AddressesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(AddressesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListAddressesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.AddressAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListAddressesRequest): + The request object. A request message for + Addresses.AggregatedList. 
See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.AddressAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/addresses', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListAddressesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.AddressAggregatedList() + pb_resp = compute.AddressAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(AddressesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteAddressRequest): + The request object. A request message for + Addresses.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/addresses/{address}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(AddressesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Address: + r"""Call the get method over HTTP. 
+ + Args: + request (~.compute.GetAddressRequest): + The request object. A request message for Addresses.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Address: + Represents an IP Address resource. Google Compute Engine + has two IP Address resources: \* `Global (external and + internal) `__ + \* `Regional (external and + internal) `__ + For more information, see Reserving a static external IP + address. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/addresses/{address}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Address() + pb_resp = compute.Address.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(AddressesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertAddressRequest): + The request object. A request message for + Addresses.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/addresses', + 'body': 'address_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(AddressesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListAddressesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.AddressList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListAddressesRequest): + The request object. A request message for Addresses.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.AddressList: + Contains a list of addresses. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/addresses', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListAddressesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.AddressList() + pb_resp = compute.AddressList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Move(AddressesRestStub): + def __hash__(self): + return hash("Move") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.MoveAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the move method over HTTP. 
+ + Args: + request (~.compute.MoveAddressRequest): + The request object. A request message for Addresses.Move. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/addresses/{address}/move', + 'body': 'region_addresses_move_request_resource', + }, + ] + request, metadata = self._interceptor.pre_move(request, metadata) + pb_request = compute.MoveAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_move(resp) + return resp + + class _SetLabels(AddressesRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsAddressRequest): + The request object. A request message for + Addresses.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/addresses/{resource}/setLabels', + 'body': 'region_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListAddressesRequest], + compute.AddressAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteAddressRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetAddressRequest], + compute.Address]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertAddressRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListAddressesRequest], + compute.AddressList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def move(self) -> Callable[ + [compute.MoveAddressRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Move(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsAddressRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'AddressesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/__init__.py new file mode 100644 index 000000000..e72576b4b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import AutoscalersClient + +__all__ = ( + 'AutoscalersClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/client.py new file mode 100644 index 000000000..41be8f1b2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/client.py @@ -0,0 +1,1815 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.autoscalers import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import AutoscalersTransport, DEFAULT_CLIENT_INFO +from .transports.rest import AutoscalersRestTransport + + +class AutoscalersClientMeta(type): + """Metaclass for the Autoscalers client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[AutoscalersTransport]] + _transport_registry["rest"] = AutoscalersRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[AutoscalersTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AutoscalersClient(metaclass=AutoscalersClientMeta): + """The Autoscalers API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AutoscalersTransport: + """Returns the transport used by the client instance. + + Returns: + AutoscalersTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AutoscalersTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the autoscalers client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AutoscalersTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AutoscalersTransport): + # transport is a AutoscalersTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListAutoscalersRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of autoscalers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListAutoscalersRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListAutoscalersRequest, dict]): + The request object. A request message for + Autoscalers.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.autoscalers.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListAutoscalersRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListAutoscalersRequest): + request = compute.AggregatedListAutoscalersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified autoscaler. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAutoscalerRequest( + autoscaler="autoscaler_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autoscaler (str): + Name of the autoscaler to delete. + This corresponds to the ``autoscaler`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, autoscaler]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteAutoscalerRequest): + request = compute.DeleteAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler is not None: + request.autoscaler = autoscaler + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("autoscaler", request.autoscaler), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified autoscaler. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAutoscalerRequest( + autoscaler="autoscaler_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autoscaler (str): + Name of the autoscaler to delete. + This corresponds to the ``autoscaler`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, autoscaler]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteAutoscalerRequest): + request = compute.DeleteAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler is not None: + request.autoscaler = autoscaler + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("autoscaler", request.autoscaler), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Autoscaler: + r"""Returns the specified autoscaler resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.GetAutoscalerRequest( + autoscaler="autoscaler_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autoscaler (str): + Name of the autoscaler to return. + This corresponds to the ``autoscaler`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Autoscaler: + Represents an Autoscaler resource. Google Compute Engine + has two Autoscaler resources: \* + [Zonal](/compute/docs/reference/rest/v1/autoscalers) \* + [Regional](/compute/docs/reference/rest/v1/regionAutoscalers) + Use autoscalers to automatically add or delete instances + from a managed instance group according to your defined + autoscaling policy. 
For more information, read + Autoscaling Groups of Instances. For zonal managed + instance groups resource, use the autoscaler resource. + For regional managed instance groups, use the + regionAutoscalers resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, autoscaler]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetAutoscalerRequest): + request = compute.GetAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler is not None: + request.autoscaler = autoscaler + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("autoscaler", request.autoscaler), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler_resource: Optional[compute.Autoscaler] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an autoscaler in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.InsertAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + This corresponds to the ``autoscaler_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, autoscaler_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertAutoscalerRequest): + request = compute.InsertAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler_resource is not None: + request.autoscaler_resource = autoscaler_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler_resource: Optional[compute.Autoscaler] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an autoscaler in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.InsertAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + This corresponds to the ``autoscaler_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, autoscaler_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertAutoscalerRequest): + request = compute.InsertAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler_resource is not None: + request.autoscaler_resource = autoscaler_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListAutoscalersRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of autoscalers contained within the + specified zone. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.ListAutoscalersRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListAutoscalersRequest, dict]): + The request object. A request message for + Autoscalers.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.autoscalers.pagers.ListPager: + Contains a list of Autoscaler + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListAutoscalersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListAutoscalersRequest): + request = compute.ListAutoscalersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler_resource: Optional[compute.Autoscaler] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates an autoscaler in the specified project using + the data included in the request. This method supports + PATCH semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.PatchAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + This corresponds to the ``autoscaler_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, autoscaler_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchAutoscalerRequest): + request = compute.PatchAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler_resource is not None: + request.autoscaler_resource = autoscaler_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler_resource: Optional[compute.Autoscaler] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates an autoscaler in the specified project using + the data included in the request. This method supports + PATCH semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.PatchAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Patch. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + This corresponds to the ``autoscaler_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, autoscaler_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchAutoscalerRequest): + request = compute.PatchAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler_resource is not None: + request.autoscaler_resource = autoscaler_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def update_unary(self, + request: Optional[Union[compute.UpdateAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler_resource: Optional[compute.Autoscaler] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates an autoscaler in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.UpdateAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + This corresponds to the ``autoscaler_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, autoscaler_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateAutoscalerRequest): + request = compute.UpdateAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler_resource is not None: + request.autoscaler_resource = autoscaler_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + autoscaler_resource: Optional[compute.Autoscaler] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates an autoscaler in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.UpdateAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateAutoscalerRequest, dict]): + The request object. A request message for + Autoscalers.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + This corresponds to the ``autoscaler_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, autoscaler_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateAutoscalerRequest): + request = compute.UpdateAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if autoscaler_resource is not None: + request.autoscaler_resource = autoscaler_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "AutoscalersClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "AutoscalersClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/pagers.py new file mode 100644 index 000000000..53f0b9951 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.AutoscalerAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.AutoscalerAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.AutoscalerAggregatedList], + request: compute.AggregatedListAutoscalersRequest, + response: compute.AutoscalerAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListAutoscalersRequest): + The initial request object. + response (google.cloud.compute_v1.types.AutoscalerAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListAutoscalersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.AutoscalerAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.AutoscalersScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.AutoscalersScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.AutoscalerList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
class ListPager:
    """Pager that lazily walks every page of a ``list`` call.

    Wraps the initial
    :class:`google.cloud.compute_v1.types.AutoscalerList` response and
    exposes ``__iter__``, which yields the elements of the ``items`` field
    and transparently issues follow-up ``List`` requests whenever the
    current response carries a ``next_page_token``.

    Attribute access falls through to the most recently fetched response,
    so all the usual :class:`google.cloud.compute_v1.types.AutoscalerList`
    fields remain available directly on the pager.
    """
    def __init__(self,
            method: Callable[..., compute.AutoscalerList],
            request: compute.ListAutoscalersRequest,
            response: compute.AutoscalerList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListAutoscalersRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.AutoscalerList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page-token updates never mutate the caller's object.
        self._request = compute.ListAutoscalersRequest(request)
        self._method = method
        self._metadata = metadata
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Delegate attributes not found on the pager to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.AutoscalerList]:
        """Yield each page of results, fetching subsequent pages lazily."""
        while True:
            yield self._response
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)

    def __iter__(self) -> Iterator[compute.Autoscaler]:
        for page in self.pages:
            for autoscaler in page.items:
                yield autoscaler

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
+_transport_registry = OrderedDict() # type: Dict[str, Type[AutoscalersTransport]] +_transport_registry['rest'] = AutoscalersRestTransport + +__all__ = ( + 'AutoscalersTransport', + 'AutoscalersRestTransport', + 'AutoscalersRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/transports/base.py new file mode 100644 index 000000000..19fdc2b68 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import abc
# FIX: `Any` is used below (`Dict[str, Any]`) but was not imported, which
# breaks annotation resolution (e.g. typing.get_type_hints) on this module.
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute
from google.cloud.compute_v1.services import zone_operations

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class AutoscalersTransport(abc.ABC):
    """Abstract transport class for Autoscalers.

    Concrete subclasses (e.g. the REST transport) implement the RPC
    properties below; this base class owns credential resolution, scope
    handling, and the wrapped-method table used by the client.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are given.
        """
        # Cache for lazily-created auxiliary clients (see _zone_operations_client).
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods so each RPC carries client-info
        # metadata (user-agent) without re-wrapping on every call.
        self._wrapped_methods = {
            self.aggregated_list: gapic_v1.method.wrap_method(
                self.aggregated_list,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete: gapic_v1.method.wrap_method(
                self.delete,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.insert: gapic_v1.method.wrap_method(
                self.insert,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
            self.patch: gapic_v1.method.wrap_method(
                self.patch,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update: gapic_v1.method.wrap_method(
                self.update,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListAutoscalersRequest],
            Union[
                compute.AutoscalerAggregatedList,
                Awaitable[compute.AutoscalerAggregatedList]
            ]]:
        raise NotImplementedError()

    @property
    def delete(self) -> Callable[
            [compute.DeleteAutoscalerRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetAutoscalerRequest],
            Union[
                compute.Autoscaler,
                Awaitable[compute.Autoscaler]
            ]]:
        raise NotImplementedError()

    @property
    def insert(self) -> Callable[
            [compute.InsertAutoscalerRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListAutoscalersRequest],
            Union[
                compute.AutoscalerList,
                Awaitable[compute.AutoscalerList]
            ]]:
        raise NotImplementedError()

    @property
    def patch(self) -> Callable[
            [compute.PatchAutoscalerRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def update(self) -> Callable[
            [compute.UpdateAutoscalerRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()

    @property
    def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient:
        # Lazily create (and cache) the zone-operations client used to poll
        # extended operations; reuses this transport's credentials and kind.
        ex_op_service = self._extended_operations_services.get("zone_operations")
        if not ex_op_service:
            ex_op_service = zone_operations.ZoneOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["zone_operations"] = ex_op_service

        return ex_op_service


__all__ = (
    'AutoscalersTransport',
)
b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/transports/rest.py new file mode 100644 index 000000000..2e5c28748 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/autoscalers/transports/rest.py @@ -0,0 +1,1056 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import AutoscalersTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = 
class AutoscalersRestInterceptor:
    """Interceptor for Autoscalers.

    Interceptors are used to manipulate requests, request metadata, and
    responses in arbitrary ways.  Example use cases include:

    * Logging
    * Verifying requests according to service or custom semantics
    * Stripping extraneous information from responses

    Each RPC has a ``pre_<rpc>`` hook (invoked before the request is sent,
    may rewrite the request and metadata) and a ``post_<rpc>`` hook (invoked
    on the server's response before it reaches user code).  The defaults
    below are pass-throughs; subclass and override only what you need, then
    inject an instance when constructing the AutoscalersRestTransport.

    .. code-block:: python
        class MyCustomAutoscalersInterceptor(AutoscalersRestInterceptor):
            def pre_get(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_get(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = AutoscalersRestTransport(interceptor=MyCustomAutoscalersInterceptor())
        client = AutoscalersClient(transport=transport)
    """

    def pre_aggregated_list(self, request: compute.AggregatedListAutoscalersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListAutoscalersRequest, Sequence[Tuple[str, str]]]:
        """Hook called before ``aggregated_list`` is sent; may rewrite request/metadata."""
        return request, metadata

    def post_aggregated_list(self, response: compute.AutoscalerAggregatedList) -> compute.AutoscalerAggregatedList:
        """Hook called on the ``aggregated_list`` response before user code sees it."""
        return response

    def pre_delete(self, request: compute.DeleteAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteAutoscalerRequest, Sequence[Tuple[str, str]]]:
        """Hook called before ``delete`` is sent; may rewrite request/metadata."""
        return request, metadata

    def post_delete(self, response: compute.Operation) -> compute.Operation:
        """Hook called on the ``delete`` response before user code sees it."""
        return response

    def pre_get(self, request: compute.GetAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetAutoscalerRequest, Sequence[Tuple[str, str]]]:
        """Hook called before ``get`` is sent; may rewrite request/metadata."""
        return request, metadata

    def post_get(self, response: compute.Autoscaler) -> compute.Autoscaler:
        """Hook called on the ``get`` response before user code sees it."""
        return response

    def pre_insert(self, request: compute.InsertAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertAutoscalerRequest, Sequence[Tuple[str, str]]]:
        """Hook called before ``insert`` is sent; may rewrite request/metadata."""
        return request, metadata

    def post_insert(self, response: compute.Operation) -> compute.Operation:
        """Hook called on the ``insert`` response before user code sees it."""
        return response

    def pre_list(self, request: compute.ListAutoscalersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListAutoscalersRequest, Sequence[Tuple[str, str]]]:
        """Hook called before ``list`` is sent; may rewrite request/metadata."""
        return request, metadata

    def post_list(self, response: compute.AutoscalerList) -> compute.AutoscalerList:
        """Hook called on the ``list`` response before user code sees it."""
        return response

    def pre_patch(self, request: compute.PatchAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchAutoscalerRequest, Sequence[Tuple[str, str]]]:
        """Hook called before ``patch`` is sent; may rewrite request/metadata."""
        return request, metadata

    def post_patch(self, response: compute.Operation) -> compute.Operation:
        """Hook called on the ``patch`` response before user code sees it."""
        return response

    def pre_update(self, request: compute.UpdateAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateAutoscalerRequest, Sequence[Tuple[str, str]]]:
        """Hook called before ``update`` is sent; may rewrite request/metadata."""
        return request, metadata

    def post_update(self, response: compute.Operation) -> compute.Operation:
        """Hook called on the ``update`` response before user code sees it."""
        return response


# Shared state for the per-RPC stub classes of the REST transport:
# the authorized HTTP session, the resolved host, and the interceptor.
@dataclasses.dataclass
class AutoscalersRestStub:
    _session: AuthorizedSession
    _host: str
    _interceptor: AutoscalersRestInterceptor
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[AutoscalersRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AutoscalersRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(AutoscalersRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListAutoscalersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.AutoscalerAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListAutoscalersRequest): + The request object. 
A request message for + Autoscalers.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.AutoscalerAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/autoscalers', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListAutoscalersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.AutoscalerAggregatedList() + pb_resp = compute.AutoscalerAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(AutoscalersRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteAutoscalerRequest): + The request object. A request message for + Autoscalers.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(AutoscalersRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Autoscaler: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetAutoscalerRequest): + The request object. A request message for + Autoscalers.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Autoscaler: + Represents an Autoscaler resource. Google Compute Engine + has two Autoscaler resources: \* + `Zonal `__ + \* + `Regional `__ + Use autoscalers to automatically add or delete instances + from a managed instance group according to your defined + autoscaling policy. For more information, read + Autoscaling Groups of Instances. For zonal managed + instance groups resource, use the autoscaler resource. + For regional managed instance groups, use the + regionAutoscalers resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Autoscaler() + pb_resp = compute.Autoscaler.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(AutoscalersRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertAutoscalerRequest): + The request object. A request message for + Autoscalers.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global <https://cloud.google.com/compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional <https://cloud.google.com/compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal <https://cloud.google.com/compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/autoscalers', + 'body': 'autoscaler_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(AutoscalersRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListAutoscalersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.AutoscalerList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListAutoscalersRequest): + The request object. A request message for + Autoscalers.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.AutoscalerList: + Contains a list of Autoscaler + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/autoscalers', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListAutoscalersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.AutoscalerList() + pb_resp = compute.AutoscalerList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(AutoscalersRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. 
+ + Args: + request (~.compute.PatchAutoscalerRequest): + The request object. A request message for + Autoscalers.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global <https://cloud.google.com/compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional <https://cloud.google.com/compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal <https://cloud.google.com/compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/autoscalers', + 'body': 'autoscaler_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _Update(AutoscalersRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateAutoscalerRequest): + The request object. A request message for + Autoscalers.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global <https://cloud.google.com/compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional <https://cloud.google.com/compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal <https://cloud.google.com/compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/autoscalers', + 'body': 'autoscaler_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListAutoscalersRequest], + compute.AutoscalerAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteAutoscalerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetAutoscalerRequest], + compute.Autoscaler]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertAutoscalerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListAutoscalersRequest], + compute.AutoscalerList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchAutoscalerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateAutoscalerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'AutoscalersRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/__init__.py new file mode 100644 index 000000000..fe6fa6f4b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import BackendBucketsClient + +__all__ = ( + 'BackendBucketsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/client.py new file mode 100644 index 000000000..da6054cc2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/client.py @@ -0,0 +1,2462 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.backend_buckets import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import BackendBucketsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import BackendBucketsRestTransport + + +class BackendBucketsClientMeta(type): + """Metaclass for the BackendBuckets client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[BackendBucketsTransport]] + _transport_registry["rest"] = BackendBucketsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[BackendBucketsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BackendBucketsClient(metaclass=BackendBucketsClientMeta): + """The BackendBuckets API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BackendBucketsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BackendBucketsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BackendBucketsTransport: + """Returns the transport used by the client instance. + + Returns: + BackendBucketsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, BackendBucketsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the backend buckets client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, BackendBucketsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, BackendBucketsTransport): + # transport is a BackendBucketsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def add_signed_url_key_unary(self, + request: Optional[Union[compute.AddSignedUrlKeyBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + signed_url_key_resource: Optional[compute.SignedUrlKey] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Adds a key for validating requests with signed URLs + for this backend bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_signed_url_key(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.AddSignedUrlKeyBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.add_signed_url_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddSignedUrlKeyBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.AddSignedUrlKey. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + which the Signed URL Key should be + added. The name should conform to + RFC1035. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + signed_url_key_resource (google.cloud.compute_v1.types.SignedUrlKey): + The body resource for this request + This corresponds to the ``signed_url_key_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, signed_url_key_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddSignedUrlKeyBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddSignedUrlKeyBackendBucketRequest): + request = compute.AddSignedUrlKeyBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if signed_url_key_resource is not None: + request.signed_url_key_resource = signed_url_key_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_signed_url_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_signed_url_key(self, + request: Optional[Union[compute.AddSignedUrlKeyBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + signed_url_key_resource: Optional[compute.SignedUrlKey] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Adds a key for validating requests with signed URLs + for this backend bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_signed_url_key(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.AddSignedUrlKeyBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.add_signed_url_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddSignedUrlKeyBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.AddSignedUrlKey. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + which the Signed URL Key should be + added. The name should conform to + RFC1035. 
+ + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + signed_url_key_resource (google.cloud.compute_v1.types.SignedUrlKey): + The body resource for this request + This corresponds to the ``signed_url_key_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, signed_url_key_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddSignedUrlKeyBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddSignedUrlKeyBackendBucketRequest): + request = compute.AddSignedUrlKeyBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if signed_url_key_resource is not None: + request.signed_url_key_resource = signed_url_key_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.add_signed_url_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified BackendBucket resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + delete. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_bucket]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteBackendBucketRequest): + request = compute.DeleteBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified BackendBucket resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + delete. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_bucket]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteBackendBucketRequest): + request = compute.DeleteBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_signed_url_key_unary(self, + request: Optional[Union[compute.DeleteSignedUrlKeyBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + key_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes a key for validating requests with signed + URLs for this backend bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_signed_url_key(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSignedUrlKeyBackendBucketRequest( + backend_bucket="backend_bucket_value", + key_name="key_name_value", + project="project_value", + ) + + # Make the request + response = client.delete_signed_url_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.DeleteSignedUrlKey. 
See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + which the Signed URL Key should be + added. The name should conform to + RFC1035. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + key_name (str): + The name of the Signed URL Key to + delete. + + This corresponds to the ``key_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, key_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSignedUrlKeyBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSignedUrlKeyBackendBucketRequest): + request = compute.DeleteSignedUrlKeyBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if key_name is not None: + request.key_name = key_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_signed_url_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_signed_url_key(self, + request: Optional[Union[compute.DeleteSignedUrlKeyBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + key_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes a key for validating requests with signed + URLs for this backend bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_signed_url_key(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSignedUrlKeyBackendBucketRequest( + backend_bucket="backend_bucket_value", + key_name="key_name_value", + project="project_value", + ) + + # Make the request + response = client.delete_signed_url_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.DeleteSignedUrlKey. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + which the Signed URL Key should be + added. The name should conform to + RFC1035. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + key_name (str): + The name of the Signed URL Key to + delete. + + This corresponds to the ``key_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, key_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSignedUrlKeyBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSignedUrlKeyBackendBucketRequest): + request = compute.DeleteSignedUrlKeyBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if key_name is not None: + request.key_name = key_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_signed_url_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.BackendBucket: + r"""Returns the specified BackendBucket resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.GetBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + return. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.BackendBucket: + Represents a Cloud Storage Bucket + resource. This Cloud Storage bucket + resource is referenced by a URL map of a + load balancer. For more information, + read Backend Buckets. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_bucket]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetBackendBucketRequest): + request = compute.GetBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket_resource: Optional[compute.BackendBucket] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a BackendBucket resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.InsertBackendBucketRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_bucket_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertBackendBucketRequest): + request = compute.InsertBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket_resource: Optional[compute.BackendBucket] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a BackendBucket resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.InsertBackendBucketRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_bucket_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertBackendBucketRequest): + request = compute.InsertBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListBackendBucketsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of BackendBucket resources + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.ListBackendBucketsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListBackendBucketsRequest, dict]): + The request object. A request message for + BackendBuckets.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.backend_buckets.pagers.ListPager: + Contains a list of BackendBucket + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListBackendBucketsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListBackendBucketsRequest): + request = compute.ListBackendBucketsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + backend_bucket_resource: Optional[compute.BackendBucket] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified BackendBucket resource with the + data included in the request. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.PatchBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ backend_bucket (str): + Name of the BackendBucket resource to + patch. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, backend_bucket_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchBackendBucketRequest): + request = compute.PatchBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + backend_bucket_resource: Optional[compute.BackendBucket] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified BackendBucket resource with the + data included in the request. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.PatchBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + patch. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_bucket, backend_bucket_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchBackendBucketRequest): + request = compute.PatchBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_edge_security_policy_unary(self, + request: Optional[Union[compute.SetEdgeSecurityPolicyBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + security_policy_reference_resource: Optional[compute.SecurityPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the edge security policy for the specified + backend bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_edge_security_policy(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.SetEdgeSecurityPolicyBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.set_edge_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetEdgeSecurityPolicyBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.SetEdgeSecurityPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendService resource + to which the security policy should be + set. The name should conform to RFC1035. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, security_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetEdgeSecurityPolicyBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetEdgeSecurityPolicyBackendBucketRequest): + request = compute.SetEdgeSecurityPolicyBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = security_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_edge_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_edge_security_policy(self, + request: Optional[Union[compute.SetEdgeSecurityPolicyBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + security_policy_reference_resource: Optional[compute.SecurityPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the edge security policy for the specified + backend bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_edge_security_policy(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.SetEdgeSecurityPolicyBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.set_edge_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetEdgeSecurityPolicyBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.SetEdgeSecurityPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendService resource + to which the security policy should be + set. 
The name should conform to RFC1035. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, security_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetEdgeSecurityPolicyBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetEdgeSecurityPolicyBackendBucketRequest): + request = compute.SetEdgeSecurityPolicyBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = security_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_edge_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def update_unary(self, + request: Optional[Union[compute.UpdateBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + backend_bucket_resource: Optional[compute.BackendBucket] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified BackendBucket resource with the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + update. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, backend_bucket_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateBackendBucketRequest): + request = compute.UpdateBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_bucket: Optional[str] = None, + backend_bucket_resource: Optional[compute.BackendBucket] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified BackendBucket resource with the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateBackendBucketRequest, dict]): + The request object. A request message for + BackendBuckets.Update. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + update. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_bucket, backend_bucket_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateBackendBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateBackendBucketRequest): + request = compute.UpdateBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_bucket", request.backend_bucket), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "BackendBucketsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "BackendBucketsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/pagers.py new file mode 100644 index 000000000..3a72c1454 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendBucketList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.compute_v1.types.BackendBucketList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.BackendBucketList], + request: compute.ListBackendBucketsRequest, + response: compute.BackendBucketList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListBackendBucketsRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendBucketList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListBackendBucketsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendBucketList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.BackendBucket]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/__init__.py new file mode 100644 index 000000000..9d7c24098 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: 
utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BackendBucketsTransport +from .rest import BackendBucketsRestTransport +from .rest import BackendBucketsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[BackendBucketsTransport]] +_transport_registry['rest'] = BackendBucketsRestTransport + +__all__ = ( + 'BackendBucketsTransport', + 'BackendBucketsRestTransport', + 'BackendBucketsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/base.py new file mode 100644 index 000000000..2b0b56e78 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class BackendBucketsTransport(abc.ABC): + """Abstract transport class for BackendBuckets.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_signed_url_key: gapic_v1.method.wrap_method( + self.add_signed_url_key, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.delete_signed_url_key: gapic_v1.method.wrap_method( + self.delete_signed_url_key, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_edge_security_policy: gapic_v1.method.wrap_method( + self.set_edge_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def add_signed_url_key(self) -> Callable[ + [compute.AddSignedUrlKeyBackendBucketRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteBackendBucketRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_signed_url_key(self) -> Callable[ + [compute.DeleteSignedUrlKeyBackendBucketRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetBackendBucketRequest], + Union[ + compute.BackendBucket, + Awaitable[compute.BackendBucket] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertBackendBucketRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListBackendBucketsRequest], + Union[ + compute.BackendBucketList, + Awaitable[compute.BackendBucketList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchBackendBucketRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_edge_security_policy(self) -> Callable[ + [compute.SetEdgeSecurityPolicyBackendBucketRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateBackendBucketRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = 
self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'BackendBucketsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/rest.py new file mode 100644 index 000000000..ebf423420 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_buckets/transports/rest.py @@ -0,0 +1,1332 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import BackendBucketsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class BackendBucketsRestInterceptor: + """Interceptor for BackendBuckets. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BackendBucketsRestTransport. + + .. 
code-block:: python + class MyCustomBackendBucketsInterceptor(BackendBucketsRestInterceptor): + def pre_add_signed_url_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_signed_url_key(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_signed_url_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_signed_url_key(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_edge_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_edge_security_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BackendBucketsRestTransport(interceptor=MyCustomBackendBucketsInterceptor()) + client = BackendBucketsClient(transport=transport) + + + """ + def pre_add_signed_url_key(self, request: compute.AddSignedUrlKeyBackendBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddSignedUrlKeyBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_signed_url_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_add_signed_url_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_signed_url_key + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteBackendBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. 
+ """ + return response + def pre_delete_signed_url_key(self, request: compute.DeleteSignedUrlKeyBackendBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteSignedUrlKeyBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_signed_url_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_delete_signed_url_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete_signed_url_key + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetBackendBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_get(self, response: compute.BackendBucket) -> compute.BackendBucket: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertBackendBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListBackendBucketsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListBackendBucketsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_list(self, response: compute.BackendBucketList) -> compute.BackendBucketList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchBackendBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. 
+ """ + return response + def pre_set_edge_security_policy(self, request: compute.SetEdgeSecurityPolicyBackendBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetEdgeSecurityPolicyBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_edge_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_set_edge_security_policy(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_edge_security_policy + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateBackendBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateBackendBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BackendBucketsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BackendBucketsRestInterceptor + + +class BackendBucketsRestTransport(BackendBucketsTransport): + """REST backend transport for BackendBuckets. + + The BackendBuckets API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[BackendBucketsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BackendBucketsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddSignedUrlKey(BackendBucketsRestStub): + def __hash__(self): + return hash("AddSignedUrlKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddSignedUrlKeyBackendBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add signed url key method over HTTP. + + Args: + request (~.compute.AddSignedUrlKeyBackendBucketRequest): + The request object. A request message for + BackendBuckets.AddSignedUrlKey. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/addSignedUrlKey', + 'body': 'signed_url_key_resource', + }, + ] + request, metadata = self._interceptor.pre_add_signed_url_key(request, metadata) + pb_request = compute.AddSignedUrlKeyBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_signed_url_key(resp) + return resp + + class _Delete(BackendBucketsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteBackendBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteBackendBucketRequest): + The request object. A request message for + BackendBuckets.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _DeleteSignedUrlKey(BackendBucketsRestStub): + def __hash__(self): + return hash("DeleteSignedUrlKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "keyName" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteSignedUrlKeyBackendBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete signed url key method over HTTP. + + Args: + request (~.compute.DeleteSignedUrlKeyBackendBucketRequest): + The request object. A request message for + BackendBuckets.DeleteSignedUrlKey. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/deleteSignedUrlKey', + }, + ] + request, metadata = self._interceptor.pre_delete_signed_url_key(request, metadata) + pb_request = compute.DeleteSignedUrlKeyBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_signed_url_key(resp) + return resp + + class _Get(BackendBucketsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetBackendBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendBucket: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetBackendBucketRequest): + The request object. A request message for + BackendBuckets.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendBucket: + Represents a Cloud Storage Bucket + resource. This Cloud Storage bucket + resource is referenced by a URL map of a + load balancer. For more information, + read Backend Buckets. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendBucket() + pb_resp = compute.BackendBucket.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(BackendBucketsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertBackendBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertBackendBucketRequest): + The request object. A request message for + BackendBuckets.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets', + 'body': 'backend_bucket_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(BackendBucketsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListBackendBucketsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendBucketList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListBackendBucketsRequest): + The request object. A request message for + BackendBuckets.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendBucketList: + Contains a list of BackendBucket + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListBackendBucketsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendBucketList() + pb_resp = compute.BackendBucketList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(BackendBucketsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchBackendBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchBackendBucketRequest): + The request object. A request message for + BackendBuckets.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}', + 'body': 'backend_bucket_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetEdgeSecurityPolicy(BackendBucketsRestStub): + def __hash__(self): + return hash("SetEdgeSecurityPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetEdgeSecurityPolicyBackendBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set edge security policy method over HTTP. + + Args: + request (~.compute.SetEdgeSecurityPolicyBackendBucketRequest): + The request object. A request message for + BackendBuckets.SetEdgeSecurityPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/setEdgeSecurityPolicy', + 'body': 'security_policy_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_edge_security_policy(request, metadata) + pb_request = compute.SetEdgeSecurityPolicyBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_edge_security_policy(resp) + return resp + + class _Update(BackendBucketsRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateBackendBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateBackendBucketRequest): + The request object. A request message for + BackendBuckets.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}', + 'body': 'backend_bucket_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def add_signed_url_key(self) -> Callable[ + [compute.AddSignedUrlKeyBackendBucketRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AddSignedUrlKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteBackendBucketRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_signed_url_key(self) -> Callable[ + [compute.DeleteSignedUrlKeyBackendBucketRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSignedUrlKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetBackendBucketRequest], + compute.BackendBucket]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertBackendBucketRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListBackendBucketsRequest], + compute.BackendBucketList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchBackendBucketRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_edge_security_policy(self) -> Callable[ + [compute.SetEdgeSecurityPolicyBackendBucketRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetEdgeSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateBackendBucketRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'BackendBucketsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/__init__.py new file mode 100644 index 000000000..f30a85cbc --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import BackendServicesClient + +__all__ = ( + 'BackendServicesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/client.py new file mode 100644 index 000000000..e565ce53b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/client.py @@ -0,0 +1,3296 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.backend_services import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import BackendServicesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import BackendServicesRestTransport + + +class BackendServicesClientMeta(type): + """Metaclass for the BackendServices client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[BackendServicesTransport]] + _transport_registry["rest"] = BackendServicesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[BackendServicesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BackendServicesClient(metaclass=BackendServicesClientMeta): + """The BackendServices API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BackendServicesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BackendServicesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BackendServicesTransport: + """Returns the transport used by the client instance. + + Returns: + BackendServicesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified 
organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, BackendServicesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the backend services client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, BackendServicesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, BackendServicesTransport): + # transport is a BackendServicesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def add_signed_url_key_unary(self, + request: Optional[Union[compute.AddSignedUrlKeyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + signed_url_key_resource: Optional[compute.SignedUrlKey] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Adds a key for validating requests with signed URLs + for this backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_signed_url_key(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.AddSignedUrlKeyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.add_signed_url_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddSignedUrlKeyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.AddSignedUrlKey. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the Signed URL Key should be + added. The name should conform to + RFC1035. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + signed_url_key_resource (google.cloud.compute_v1.types.SignedUrlKey): + The body resource for this request + This corresponds to the ``signed_url_key_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, signed_url_key_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddSignedUrlKeyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddSignedUrlKeyBackendServiceRequest): + request = compute.AddSignedUrlKeyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if signed_url_key_resource is not None: + request.signed_url_key_resource = signed_url_key_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_signed_url_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_signed_url_key(self, + request: Optional[Union[compute.AddSignedUrlKeyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + signed_url_key_resource: Optional[compute.SignedUrlKey] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Adds a key for validating requests with signed URLs + for this backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_signed_url_key(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.AddSignedUrlKeyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.add_signed_url_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddSignedUrlKeyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.AddSignedUrlKey. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the Signed URL Key should be + added. The name should conform to + RFC1035. 
+ + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + signed_url_key_resource (google.cloud.compute_v1.types.SignedUrlKey): + The body resource for this request + This corresponds to the ``signed_url_key_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, signed_url_key_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddSignedUrlKeyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddSignedUrlKeyBackendServiceRequest): + request = compute.AddSignedUrlKeyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if signed_url_key_resource is not None: + request.signed_url_key_resource = signed_url_key_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_signed_url_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListBackendServicesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all BackendService resources, + regional and global, available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListBackendServicesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListBackendServicesRequest, dict]): + The request object. A request message for + BackendServices.AggregatedList. See the + method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.services.backend_services.pagers.AggregatedListPager: + Contains a list of + BackendServicesScopedList. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListBackendServicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListBackendServicesRequest): + request = compute.AggregatedListBackendServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified BackendService resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to delete. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteBackendServiceRequest): + request = compute.DeleteBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete(self, + request: Optional[Union[compute.DeleteBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified BackendService resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to delete. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteBackendServiceRequest): + request = compute.DeleteBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_signed_url_key_unary(self, + request: Optional[Union[compute.DeleteSignedUrlKeyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + key_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes a key for validating requests with signed + URLs for this backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_signed_url_key(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSignedUrlKeyBackendServiceRequest( + backend_service="backend_service_value", + key_name="key_name_value", + project="project_value", + ) + + # Make the request + response = client.delete_signed_url_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.DeleteSignedUrlKey. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the Signed URL Key should be + added. The name should conform to + RFC1035. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + key_name (str): + The name of the Signed URL Key to + delete. + + This corresponds to the ``key_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, key_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSignedUrlKeyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSignedUrlKeyBackendServiceRequest): + request = compute.DeleteSignedUrlKeyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if key_name is not None: + request.key_name = key_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_signed_url_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_signed_url_key(self, + request: Optional[Union[compute.DeleteSignedUrlKeyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + key_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes a key for validating requests with signed + URLs for this backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_signed_url_key(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSignedUrlKeyBackendServiceRequest( + backend_service="backend_service_value", + key_name="key_name_value", + project="project_value", + ) + + # Make the request + response = client.delete_signed_url_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.DeleteSignedUrlKey. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the Signed URL Key should be + added. The name should conform to + RFC1035. 
+ + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + key_name (str): + The name of the Signed URL Key to + delete. + + This corresponds to the ``key_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, key_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSignedUrlKeyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSignedUrlKeyBackendServiceRequest): + request = compute.DeleteSignedUrlKeyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if key_name is not None: + request.key_name = key_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_signed_url_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.BackendService: + r"""Returns the specified BackendService resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to return. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.BackendService: + Represents a Backend Service resource. A backend service + defines how Google Cloud load balancers distribute + traffic. 
The backend service configuration contains a + set of values, such as the protocol used to connect to + backends, various distribution and session settings, + health checks, and timeouts. These settings provide + fine-grained control over how your load balancer + behaves. Most of the settings have default values that + allow for easy configuration if you need to get started + quickly. Backend services in Google Compute Engine can + be either regionally or globally scoped. \* + [Global](https://cloud.google.com/compute/docs/reference/rest/v1/backendServices) + \* + [Regional](https://cloud.google.com/compute/docs/reference/rest/v1/regionBackendServices) + For more information, see Backend Services. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetBackendServiceRequest): + request = compute.GetBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_health(self, + request: Optional[Union[compute.GetHealthBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + resource_group_reference_resource: Optional[compute.ResourceGroupReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.BackendServiceGroupHealth: + r"""Gets the most recent health check results for this + BackendService. Example request body: { "group": + "/zones/us-east1-b/instanceGroups/lb-backend-example" } + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_health(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetHealthBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.GetHealth. 
See the + method description for details. + project (str): + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the queried instance belongs. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_group_reference_resource (google.cloud.compute_v1.types.ResourceGroupReference): + The body resource for this request + This corresponds to the ``resource_group_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.BackendServiceGroupHealth: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, resource_group_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetHealthBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetHealthBackendServiceRequest): + request = compute.GetHealthBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if resource_group_reference_resource is not None: + request.resource_group_reference_resource = resource_group_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_health] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyBackendServiceRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. 
For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyBackendServiceRequest): + request = compute.GetIamPolicyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a BackendService resource in the specified + project using the data included in the request. For more + information, see Backend services overview . + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertBackendServiceRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertBackendServiceRequest): + request = compute.InsertBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a BackendService resource in the specified + project using the data included in the request. For more + information, see Backend services overview . + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertBackendServiceRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertBackendServiceRequest): + request = compute.InsertBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListBackendServicesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of BackendService resources + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListBackendServicesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListBackendServicesRequest, dict]): + The request object. A request message for + BackendServices.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.backend_services.pagers.ListPager: + Contains a list of BackendService + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListBackendServicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListBackendServicesRequest): + request = compute.ListBackendServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified BackendService resource with + the data included in the request. For more information, + see Backend services overview. This method supports + PATCH semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Patch. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to patch. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchBackendServiceRequest): + request = compute.PatchBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified BackendService resource with + the data included in the request. For more information, + see Backend services overview. This method supports + PATCH semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to patch. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_service, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchBackendServiceRequest): + request = compute.PatchBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_edge_security_policy_unary(self, + request: Optional[Union[compute.SetEdgeSecurityPolicyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + security_policy_reference_resource: Optional[compute.SecurityPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the edge security policy for the specified + backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_edge_security_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetEdgeSecurityPolicyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.set_edge_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetEdgeSecurityPolicyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.SetEdgeSecurityPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the edge security policy should + be set. The name should conform to + RFC1035. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, security_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetEdgeSecurityPolicyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetEdgeSecurityPolicyBackendServiceRequest): + request = compute.SetEdgeSecurityPolicyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = security_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_edge_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_edge_security_policy(self, + request: Optional[Union[compute.SetEdgeSecurityPolicyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + security_policy_reference_resource: Optional[compute.SecurityPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the edge security policy for the specified + backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_edge_security_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetEdgeSecurityPolicyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.set_edge_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetEdgeSecurityPolicyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.SetEdgeSecurityPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the edge security policy should + be set. 
The name should conform to + RFC1035. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, security_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetEdgeSecurityPolicyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetEdgeSecurityPolicyBackendServiceRequest): + request = compute.SetEdgeSecurityPolicyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = security_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_edge_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_policy_request_resource: Optional[compute.GlobalSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyBackendServiceRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyBackendServiceRequest): + request = compute.SetIamPolicyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_policy_request_resource is not None: + request.global_set_policy_request_resource = global_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_security_policy_unary(self, + request: Optional[Union[compute.SetSecurityPolicyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + security_policy_reference_resource: Optional[compute.SecurityPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the Google Cloud Armor security policy for the + specified backend service. For more information, see + Google Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.SetSecurityPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to which the security policy should be + set. The name should conform to RFC1035. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, security_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSecurityPolicyBackendServiceRequest): + request = compute.SetSecurityPolicyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = security_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_security_policy(self, + request: Optional[Union[compute.SetSecurityPolicyBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + security_policy_reference_resource: Optional[compute.SecurityPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the Google Cloud Armor security policy for the + specified backend service. For more information, see + Google Cloud Armor Overview + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_security_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.SetSecurityPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ backend_service (str): + Name of the BackendService resource + to which the security policy should be + set. The name should conform to RFC1035. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + This corresponds to the ``security_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, security_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSecurityPolicyBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSecurityPolicyBackendServiceRequest): + request = compute.SetSecurityPolicyBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if security_policy_reference_resource is not None: + request.security_policy_reference_resource = security_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_security_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def update_unary(self, + request: Optional[Union[compute.UpdateBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified BackendService resource with + the data included in the request. For more information, + see Backend services overview. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to update. 
+ + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, backend_service, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateBackendServiceRequest): + request = compute.UpdateBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + backend_service: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified BackendService resource with + the data included in the request. For more information, + see Backend services overview. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateBackendServiceRequest, dict]): + The request object. A request message for + BackendServices.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to update. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, backend_service, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateBackendServiceRequest): + request = compute.UpdateBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if backend_service is not None: + request.backend_service = backend_service + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "BackendServicesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "BackendServicesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/pagers.py new file mode 100644 index 000000000..ad822c4d2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendServiceAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.BackendServiceAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.BackendServiceAggregatedList], + request: compute.AggregatedListBackendServicesRequest, + response: compute.BackendServiceAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListBackendServicesRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendServiceAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListBackendServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendServiceAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.BackendServicesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.BackendServicesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendServiceList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.BackendServiceList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.BackendServiceList], + request: compute.ListBackendServicesRequest, + response: compute.BackendServiceList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListBackendServicesRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendServiceList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListBackendServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendServiceList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.BackendService]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/__init__.py new file mode 100644 index 000000000..b121084fd --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BackendServicesTransport +from .rest import BackendServicesRestTransport +from .rest import BackendServicesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[BackendServicesTransport]] +_transport_registry['rest'] = BackendServicesRestTransport + +__all__ = ( + 'BackendServicesTransport', + 'BackendServicesRestTransport', + 'BackendServicesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/base.py new file mode 100644 index 000000000..f05c5aebe --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/base.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class BackendServicesTransport(abc.ABC): + """Abstract transport class for BackendServices.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_signed_url_key: gapic_v1.method.wrap_method( + self.add_signed_url_key, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.delete_signed_url_key: gapic_v1.method.wrap_method( + self.delete_signed_url_key, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_health: gapic_v1.method.wrap_method( + self.get_health, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_edge_security_policy: gapic_v1.method.wrap_method( + self.set_edge_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_security_policy: gapic_v1.method.wrap_method( + self.set_security_policy, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources 
associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def add_signed_url_key(self) -> Callable[ + [compute.AddSignedUrlKeyBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListBackendServicesRequest], + Union[ + compute.BackendServiceAggregatedList, + Awaitable[compute.BackendServiceAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_signed_url_key(self) -> Callable[ + [compute.DeleteSignedUrlKeyBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetBackendServiceRequest], + Union[ + compute.BackendService, + Awaitable[compute.BackendService] + ]]: + raise NotImplementedError() + + @property + def get_health(self) -> Callable[ + [compute.GetHealthBackendServiceRequest], + Union[ + compute.BackendServiceGroupHealth, + Awaitable[compute.BackendServiceGroupHealth] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyBackendServiceRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListBackendServicesRequest], + Union[ + compute.BackendServiceList, + Awaitable[compute.BackendServiceList] + ]]: + 
raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_edge_security_policy(self) -> Callable[ + [compute.SetEdgeSecurityPolicyBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyBackendServiceRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def set_security_policy(self) -> Callable[ + [compute.SetSecurityPolicyBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'BackendServicesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/rest.py new file mode 100644 index 000000000..3c3231705 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/backend_services/transports/rest.py @@ -0,0 +1,2018 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import BackendServicesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class BackendServicesRestInterceptor: + """Interceptor for BackendServices. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BackendServicesRestTransport. + + .. code-block:: python + class MyCustomBackendServicesInterceptor(BackendServicesRestInterceptor): + def pre_add_signed_url_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_signed_url_key(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_signed_url_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_signed_url_key(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_health(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_health(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received 
response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_edge_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_edge_security_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_security_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_security_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BackendServicesRestTransport(interceptor=MyCustomBackendServicesInterceptor()) + client = BackendServicesClient(transport=transport) + + + """ + def pre_add_signed_url_key(self, request: compute.AddSignedUrlKeyBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[compute.AddSignedUrlKeyBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_signed_url_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_add_signed_url_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_signed_url_key + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_aggregated_list(self, request: compute.AggregatedListBackendServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListBackendServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.BackendServiceAggregatedList) -> compute.BackendServiceAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. 
+ """ + return response + def pre_delete_signed_url_key(self, request: compute.DeleteSignedUrlKeyBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteSignedUrlKeyBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_signed_url_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_delete_signed_url_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete_signed_url_key + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_get(self, response: compute.BackendService) -> compute.BackendService: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_get_health(self, request: compute.GetHealthBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetHealthBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_health + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. 
+ """ + return request, metadata + + def post_get_health(self, response: compute.BackendServiceGroupHealth) -> compute.BackendServiceGroupHealth: + """Post-rpc interceptor for get_health + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListBackendServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListBackendServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_list(self, response: compute.BackendServiceList) -> compute.BackendServiceList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_set_edge_security_policy(self, request: compute.SetEdgeSecurityPolicyBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetEdgeSecurityPolicyBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_edge_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. 
+ """ + return request, metadata + + def post_set_edge_security_policy(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_edge_security_policy + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + def pre_set_security_policy(self, request: compute.SetSecurityPolicyBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetSecurityPolicyBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_set_security_policy(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. 
+ """ + return response + def pre_update(self, request: compute.UpdateBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendServices server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the BackendServices server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BackendServicesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BackendServicesRestInterceptor + + +class BackendServicesRestTransport(BackendServicesTransport): + """REST backend transport for BackendServices. + + The BackendServices API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[BackendServicesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BackendServicesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddSignedUrlKey(BackendServicesRestStub): + def __hash__(self): + return hash("AddSignedUrlKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddSignedUrlKeyBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add signed url key method over HTTP. + + Args: + request (~.compute.AddSignedUrlKeyBackendServiceRequest): + The request object. 
A request message for + BackendServices.AddSignedUrlKey. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}/addSignedUrlKey', + 'body': 'signed_url_key_resource', + }, + ] + request, metadata = self._interceptor.pre_add_signed_url_key(request, metadata) + pb_request = compute.AddSignedUrlKeyBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + 
headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_signed_url_key(resp) + return resp + + class _AggregatedList(BackendServicesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListBackendServicesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendServiceAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListBackendServicesRequest): + The request object. A request message for + BackendServices.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendServiceAggregatedList: + Contains a list of + BackendServicesScopedList. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/backendServices', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListBackendServicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendServiceAggregatedList() + pb_resp = compute.BackendServiceAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(BackendServicesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteBackendServiceRequest): + The request object. A request message for + BackendServices.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _DeleteSignedUrlKey(BackendServicesRestStub): + def __hash__(self): + return hash("DeleteSignedUrlKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "keyName" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteSignedUrlKeyBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete signed url key method over HTTP. + + Args: + request (~.compute.DeleteSignedUrlKeyBackendServiceRequest): + The request object. A request message for + BackendServices.DeleteSignedUrlKey. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}/deleteSignedUrlKey', + }, + ] + request, metadata = self._interceptor.pre_delete_signed_url_key(request, metadata) + pb_request = compute.DeleteSignedUrlKeyBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_signed_url_key(resp) + return resp + + class _Get(BackendServicesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendService: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetBackendServiceRequest): + The request object. A request message for + BackendServices.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendService: + Represents a Backend Service resource. A backend service + defines how Google Cloud load balancers distribute + traffic. The backend service configuration contains a + set of values, such as the protocol used to connect to + backends, various distribution and session settings, + health checks, and timeouts. These settings provide + fine-grained control over how your load balancer + behaves. Most of the settings have default values that + allow for easy configuration if you need to get started + quickly. Backend services in Google Compute Engine can + be either regionally or globally scoped. 
\* + `Global `__ + \* + `Regional `__ + For more information, see Backend Services. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendService() + pb_resp = compute.BackendService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetHealth(BackendServicesRestStub): + def __hash__(self): + return hash("GetHealth") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetHealthBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendServiceGroupHealth: + r"""Call the get health method over HTTP. + + Args: + request (~.compute.GetHealthBackendServiceRequest): + The request object. A request message for + BackendServices.GetHealth. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.BackendServiceGroupHealth: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}/getHealth', + 'body': 'resource_group_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_get_health(request, metadata) + pb_request = compute.GetHealthBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendServiceGroupHealth() + pb_resp = compute.BackendServiceGroupHealth.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_health(resp) + return resp + + class _GetIamPolicy(BackendServicesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyBackendServiceRequest): + The request object. A request message for + BackendServices.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(BackendServicesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertBackendServiceRequest): + The request object. A request message for + BackendServices.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendServices', + 'body': 'backend_service_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(BackendServicesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListBackendServicesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendServiceList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListBackendServicesRequest): + The request object. A request message for + BackendServices.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendServiceList: + Contains a list of BackendService + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/backendServices', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListBackendServicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendServiceList() + pb_resp = compute.BackendServiceList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(BackendServicesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchBackendServiceRequest): + The request object. A request message for + BackendServices.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}', + 'body': 'backend_service_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetEdgeSecurityPolicy(BackendServicesRestStub): + def __hash__(self): + return hash("SetEdgeSecurityPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetEdgeSecurityPolicyBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set edge security policy method over HTTP. + + Args: + request (~.compute.SetEdgeSecurityPolicyBackendServiceRequest): + The request object. A request message for + BackendServices.SetEdgeSecurityPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}/setEdgeSecurityPolicy', + 'body': 'security_policy_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_edge_security_policy(request, metadata) + pb_request = compute.SetEdgeSecurityPolicyBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_edge_security_policy(resp) + return resp + + class _SetIamPolicy(BackendServicesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyBackendServiceRequest): + The request object. A request message for + BackendServices.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{resource}/setIamPolicy', + 'body': 'global_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _SetSecurityPolicy(BackendServicesRestStub): + def __hash__(self): + return hash("SetSecurityPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetSecurityPolicyBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set security policy method over HTTP. + + Args: + request (~.compute.SetSecurityPolicyBackendServiceRequest): + The request object. A request message for + BackendServices.SetSecurityPolicy. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}/setSecurityPolicy', + 'body': 'security_policy_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_security_policy(request, metadata) + pb_request = compute.SetSecurityPolicyBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_security_policy(resp) + return resp + + class _Update(BackendServicesRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateBackendServiceRequest): + The request object. A request message for + BackendServices.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/global/backendServices/{backend_service}', + 'body': 'backend_service_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def add_signed_url_key(self) -> Callable[ + [compute.AddSignedUrlKeyBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AddSignedUrlKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListBackendServicesRequest], + compute.BackendServiceAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_signed_url_key(self) -> Callable[ + [compute.DeleteSignedUrlKeyBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSignedUrlKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetBackendServiceRequest], + compute.BackendService]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_health(self) -> Callable[ + [compute.GetHealthBackendServiceRequest], + compute.BackendServiceGroupHealth]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetHealth(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyBackendServiceRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListBackendServicesRequest], + compute.BackendServiceList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_edge_security_policy(self) -> Callable[ + [compute.SetEdgeSecurityPolicyBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetEdgeSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyBackendServiceRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_security_policy(self) -> Callable[ + [compute.SetSecurityPolicyBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSecurityPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'BackendServicesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/__init__.py new file mode 100644 index 000000000..1fe356602 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DiskTypesClient + +__all__ = ( + 'DiskTypesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/client.py new file mode 100644 index 000000000..a25192d03 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/client.py @@ -0,0 +1,756 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.disk_types import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import DiskTypesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import DiskTypesRestTransport + + +class DiskTypesClientMeta(type): + """Metaclass for the DiskTypes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DiskTypesTransport]] + _transport_registry["rest"] = DiskTypesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DiskTypesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+ if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DiskTypesClient(metaclass=DiskTypesClientMeta): + """The DiskTypes API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiskTypesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiskTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DiskTypesTransport: + """Returns the transport used by the client instance. + + Returns: + DiskTypesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + 
return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DiskTypesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the disk types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DiskTypesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DiskTypesTransport): + # transport is a DiskTypesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListDiskTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of disk types. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.DiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListDiskTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListDiskTypesRequest, dict]): + The request object. A request message for + DiskTypes.AggregatedList. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.disk_types.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListDiskTypesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListDiskTypesRequest): + request = compute.AggregatedListDiskTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetDiskTypeRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.DiskType: + r"""Returns the specified disk type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.DiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetDiskTypeRequest( + disk_type="disk_type_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetDiskTypeRequest, dict]): + The request object. A request message for DiskTypes.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_type (str): + Name of the disk type to return. + This corresponds to the ``disk_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.DiskType: + Represents a Disk Type resource. Google Compute Engine + has two Disk Type resources: \* + [Regional](/compute/docs/reference/rest/v1/regionDiskTypes) + \* [Zonal](/compute/docs/reference/rest/v1/diskTypes) + You can choose from a variety of disk types based on + your needs. For more information, read Storage options. 
+ The diskTypes resource represents disk types for a zonal + persistent disk. For more information, read Zonal + persistent disks. The regionDiskTypes resource + represents disk types for a regional persistent disk. + For more information, read Regional persistent disks. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetDiskTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetDiskTypeRequest): + request = compute.GetDiskTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk_type is not None: + request.disk_type = disk_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk_type", request.disk_type), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response
+
+ def list(self,
+ request: Optional[Union[compute.ListDiskTypesRequest, dict]] = None,
+ *,
+ project: Optional[str] = None,
+ zone: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListPager:
+ r"""Retrieves a list of disk types available to the
+ specified project.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_list():
+ # Create a client
+ client = compute_v1.DiskTypesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.ListDiskTypesRequest(
+ project="project_value",
+ zone="zone_value",
+ )
+
+ # Make the request
+ page_result = client.list(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.ListDiskTypesRequest, dict]):
+ The request object. A request message for DiskTypes.List.
+ See the method description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ The name of the zone for this
+ request.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.services.disk_types.pagers.ListPager:
+ Contains a list of disk types.
+
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ Raises:
+ ValueError: If ``request`` is provided together with any of the
+ flattened field arguments (``project``, ``zone``).
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.ListDiskTypesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.ListDiskTypesRequest):
+ request = compute.ListDiskTypesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if zone is not None:
+ request.zone = zone
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ # The routing header is derived from the request's project/zone fields.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("zone", request.zone),
+ )),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response + + def __enter__(self) -> "DiskTypesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DiskTypesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/pagers.py new file mode 100644 index 000000000..768b57257 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.DiskTypeAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.DiskTypeAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.DiskTypeAggregatedList], + request: compute.AggregatedListDiskTypesRequest, + response: compute.DiskTypeAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListDiskTypesRequest): + The initial request object. + response (google.cloud.compute_v1.types.DiskTypeAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListDiskTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.DiskTypeAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.DiskTypesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.DiskTypesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.DiskTypeList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.DiskTypeList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.DiskTypeList], + request: compute.ListDiskTypesRequest, + response: compute.DiskTypeList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListDiskTypesRequest): + The initial request object. 
+ response (google.cloud.compute_v1.types.DiskTypeList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListDiskTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.DiskTypeList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.DiskType]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/__init__.py new file mode 100644 index 000000000..916c17143 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+from typing import Dict, Type

+from .base import DiskTypesTransport
+from .rest import DiskTypesRestTransport
+from .rest import DiskTypesRestInterceptor


+# Compile a registry of transports.
+# Maps the transport label ('rest') to its concrete class; REST is the
+# only transport registered for this service.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[DiskTypesTransport]]
+_transport_registry['rest'] = DiskTypesRestTransport

+__all__ = (
+ 'DiskTypesTransport',
+ 'DiskTypesRestTransport',
+ 'DiskTypesRestInterceptor',
+)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/base.py
new file mode 100644
index 000000000..6a2e43a57
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/base.py
@@ -0,0 +1,178 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DiskTypesTransport(abc.ABC): + """Abstract transport class for DiskTypes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListDiskTypesRequest], + Union[ + compute.DiskTypeAggregatedList, + Awaitable[compute.DiskTypeAggregatedList] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetDiskTypeRequest], + Union[ + compute.DiskType, + Awaitable[compute.DiskType] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListDiskTypesRequest], + Union[ + compute.DiskTypeList, + Awaitable[compute.DiskTypeList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DiskTypesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/rest.py new file mode 100644 index 000000000..82b783241 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disk_types/transports/rest.py @@ -0,0 +1,524 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import DiskTypesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DiskTypesRestInterceptor: + """Interceptor for DiskTypes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DiskTypesRestTransport. + + .. code-block:: python + class MyCustomDiskTypesInterceptor(DiskTypesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DiskTypesRestTransport(interceptor=MyCustomDiskTypesInterceptor()) + client = DiskTypesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListDiskTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListDiskTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiskTypes server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.DiskTypeAggregatedList) -> compute.DiskTypeAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the DiskTypes server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetDiskTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetDiskTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiskTypes server. + """ + return request, metadata + + def post_get(self, response: compute.DiskType) -> compute.DiskType: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the DiskTypes server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListDiskTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListDiskTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiskTypes server. + """ + return request, metadata + + def post_list(self, response: compute.DiskTypeList) -> compute.DiskTypeList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the DiskTypes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DiskTypesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DiskTypesRestInterceptor + + +class DiskTypesRestTransport(DiskTypesTransport): + """REST backend transport for DiskTypes. + + The DiskTypes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[DiskTypesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DiskTypesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(DiskTypesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListDiskTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DiskTypeAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListDiskTypesRequest): + The request object. A request message for + DiskTypes.AggregatedList. 
See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.DiskTypeAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/diskTypes', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListDiskTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
    class _Get(DiskTypesRestStub):
        """REST stub for the DiskTypes.Get method.

        Transcodes the request into an HTTP GET, sends it over the
        authorized session, and parses the JSON response into a
        ``compute.DiskType`` message.
        """

        def __hash__(self):
            # Stubs are identified by their method name.
            return hash("Get")

        # Required query-string fields whose proto default values would be
        # dropped by MessageToJson; empty for this method.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Defaults for required fields that are missing from message_dict.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetDiskTypeRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.DiskType:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetDiskTypeRequest):
                    The request object. A request message for DiskTypes.Get.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.DiskType:
                    Represents a Disk Type resource. Google Compute Engine
                    has two Disk Type resources: Regional and Zonal. You can
                    choose from a variety of disk types based on your needs.
                    For more information, read Storage options. The diskTypes
                    resource represents disk types for a zonal persistent
                    disk; the regionDiskTypes resource represents disk types
                    for a regional persistent disk.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/zones/{zone}/diskTypes/{disk_type}',
            },
            ]
            # Let the interceptor rewrite the request/metadata before transcoding.
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetDiskTypeRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response. Parsing into pb_resp fills `resp` in place —
            # presumably the proto-plus wrapper shares the underlying protobuf
            # message returned by .pb().
            resp = compute.DiskType()
            pb_resp = compute.DiskType.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/diskTypes/{disk_type}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetDiskTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.DiskType() + pb_resp = compute.DiskType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(DiskTypesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListDiskTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DiskTypeList: + r"""Call the list method over HTTP. 
+ + Args: + request (~.compute.ListDiskTypesRequest): + The request object. A request message for DiskTypes.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.DiskTypeList: + Contains a list of disk types. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/diskTypes', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListDiskTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListDiskTypesRequest],
            compute.DiskTypeAggregatedList]:
        # Each access constructs a fresh stub bound to the shared session,
        # host, and interceptor.
        # The return type is fine, but mypy isn't sophisticated enough to
        # determine what's going on here. In C++ this would require a
        # dynamic_cast.
        return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore

    @property
    def get(self) -> Callable[
            [compute.GetDiskTypeRequest],
            compute.DiskType]:
        # The return type is fine, but mypy isn't sophisticated enough to
        # determine what's going on here. In C++ this would require a
        # dynamic_cast.
        return self._Get(self._session, self._host, self._interceptor) # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListDiskTypesRequest],
            compute.DiskTypeList]:
        # The return type is fine, but mypy isn't sophisticated enough to
        # determine what's going on here. In C++ this would require a
        # dynamic_cast.
        return self._List(self._session, self._host, self._interceptor) # type: ignore

    @property
    def kind(self) -> str:
        # Transport flavor identifier consumed by the client layer.
        return "rest"

    def close(self):
        # Release the underlying authorized HTTP session.
        self._session.close()
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DisksClient + +__all__ = ( + 'DisksClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/client.py new file mode 100644 index 000000000..25d440b0d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/client.py @@ -0,0 +1,4613 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.disks import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import DisksTransport, DEFAULT_CLIENT_INFO +from .transports.rest import DisksRestTransport + + +class DisksClientMeta(type): + """Metaclass for the Disks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DisksTransport]] + _transport_registry["rest"] = DisksRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DisksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DisksClient(metaclass=DisksClientMeta): + """The Disks API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DisksClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DisksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DisksTransport: + """Returns the transport used by the client instance. + + Returns: + DisksTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return 
"organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Validate the two controlling environment variables up front.
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            # Explicit option wins over the platform default cert source.
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source
    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, DisksTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the disks client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, DisksTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
                NOTE: "rest" transport functionality is currently in a
                beta state (preview). We welcome your feedback via an
                issue in this library's source repository.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options to a ClientOptions instance.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        # An API key and explicit credentials cannot both be supplied.
        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, DisksTransport):
            # transport is a DisksTransport instance.
            # A ready-made transport already carries its own credentials and
            # scopes; supplying them again here would be ambiguous.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            # Imported lazily; get_api_key_credentials only exists in newer
            # versions of google-auth, hence the hasattr guard below.
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )
    def add_resource_policies_unary(self,
            request: Optional[Union[compute.AddResourcePoliciesDiskRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            disk: Optional[str] = None,
            disks_add_resource_policies_request_resource: Optional[compute.DisksAddResourcePoliciesRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Adds existing resource policies to a disk. You can
        only add one policy which will be applied to this disk
        for scheduling snapshot creation.

        Unlike :meth:`add_resource_policies`, this returns the raw
        ``compute.Operation`` without wrapping it in an extended
        (pollable) operation.

        Args:
            request (Union[google.cloud.compute_v1.types.AddResourcePoliciesDiskRequest, dict]):
                The request object. A request message for
                Disks.AddResourcePolicies. See the method description for
                details.
            project (str):
                Project ID for this request. This corresponds to the
                ``project`` field on the ``request`` instance; if
                ``request`` is provided, this should not be set.
            zone (str):
                The name of the zone for this request. This corresponds to
                the ``zone`` field on the ``request`` instance; if
                ``request`` is provided, this should not be set.
            disk (str):
                The disk name for this request. This corresponds to the
                ``disk`` field on the ``request`` instance; if ``request``
                is provided, this should not be set.
            disks_add_resource_policies_request_resource (google.cloud.compute_v1.types.DisksAddResourcePoliciesRequest):
                The body resource for this request. This corresponds to the
                ``disks_add_resource_policies_request_resource`` field on
                the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        Raises:
            ValueError: If both ``request`` and any of the individual
                flattened field arguments are provided.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, disk, disks_add_resource_policies_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddResourcePoliciesDiskRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddResourcePoliciesDiskRequest):
            request = compute.AddResourcePoliciesDiskRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if disk is not None:
                request.disk = disk
            if disks_add_resource_policies_request_resource is not None:
                request.disks_add_resource_policies_request_resource = disks_add_resource_policies_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_resource_policies]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("disk", request.disk),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_add_resource_policies_request_resource (google.cloud.compute_v1.types.DisksAddResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``disks_add_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk, disks_add_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddResourcePoliciesDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddResourcePoliciesDiskRequest): + request = compute.AddResourcePoliciesDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disks_add_resource_policies_request_resource is not None: + request.disks_add_resource_policies_request_resource = disks_add_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+        return response
+
+    def aggregated_list(self,
+            request: Optional[Union[compute.AggregatedListDisksRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.AggregatedListPager:
+        r"""Retrieves an aggregated list of persistent disks.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_aggregated_list():
+                # Create a client
+                client = compute_v1.DisksClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.AggregatedListDisksRequest(
+                    project="project_value",
+                )
+
+                # Make the request
+                page_result = client.aggregated_list(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.AggregatedListDisksRequest, dict]):
+                The request object. A request message for
+                Disks.AggregatedList. See the method
+                description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.services.disks.pagers.AggregatedListPager:
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.AggregatedListDisksRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.AggregatedListDisksRequest):
+            request = compute.AggregatedListDisksRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if project is not None:
+                request.project = project
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.aggregated_list]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        # NOTE(review): per google-api-core pager semantics, subsequent pages
+        # are fetched on demand during iteration (each re-invoking `rpc`).
+        response = pagers.AggregatedListPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def bulk_insert_unary(self,
+            request: Optional[Union[compute.BulkInsertDiskRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            bulk_insert_disk_resource_resource: Optional[compute.BulkInsertDiskResource] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> compute.Operation:
+        r"""Bulk create a set of disks.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_bulk_insert():
+                # Create a client
+                client = compute_v1.DisksClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.BulkInsertDiskRequest(
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.bulk_insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.BulkInsertDiskRequest, dict]):
+                The request object. A request message for
+                Disks.BulkInsert. See the method
+                description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone for this
+                request.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            bulk_insert_disk_resource_resource (google.cloud.compute_v1.types.BulkInsertDiskResource):
+                The body resource for this request
+                This corresponds to the ``bulk_insert_disk_resource_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw initial operation response.
+                Unlike :meth:`bulk_insert`, this variant
+                does not wrap the result in a poller for
+                the extended long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, bulk_insert_disk_resource_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.BulkInsertDiskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.BulkInsertDiskRequest):
+            request = compute.BulkInsertDiskRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if project is not None:
+                request.project = project
+            if zone is not None:
+                request.zone = zone
+            if bulk_insert_disk_resource_resource is not None:
+                request.bulk_insert_disk_resource_resource = bulk_insert_disk_resource_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.bulk_insert]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def bulk_insert(self,
+            request: Optional[Union[compute.BulkInsertDiskRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            bulk_insert_disk_resource_resource: Optional[compute.BulkInsertDiskResource] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> extended_operation.ExtendedOperation:
+        r"""Bulk create a set of disks.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_bulk_insert():
+                # Create a client
+                client = compute_v1.DisksClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.BulkInsertDiskRequest(
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.bulk_insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.BulkInsertDiskRequest, dict]):
+                The request object. A request message for
+                Disks.BulkInsert. See the method
+                description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone for this
+                request.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            bulk_insert_disk_resource_resource (google.cloud.compute_v1.types.BulkInsertDiskResource):
+                The body resource for this request
+                This corresponds to the ``bulk_insert_disk_resource_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, bulk_insert_disk_resource_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.BulkInsertDiskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.BulkInsertDiskRequest):
+            request = compute.BulkInsertDiskRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if project is not None:
+                request.project = project
+            if zone is not None:
+                request.zone = zone
+            if bulk_insert_disk_resource_resource is not None:
+                request.bulk_insert_disk_resource_resource = bulk_insert_disk_resource_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.bulk_insert]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Build a poller for the returned operation, backed by the zone
+        # operations service.
+        operation_service = self._transport._zone_operations_client
+        operation_request = compute.GetZoneOperationRequest()
+        operation_request.project = request.project
+        operation_request.zone = request.zone
+        operation_request.operation = response.name
+
+        get_operation = functools.partial(operation_service.get, operation_request)
+        # Cancel is not part of extended operations yet.
+        cancel_operation = lambda: None
+
+        # Note: this class is an implementation detail to provide a uniform
+        # set of names for certain fields in the extended operation proto message.
+        # See google.api_core.extended_operation.ExtendedOperation for details
+        # on these properties and the expected interface.
+        class _CustomOperation(extended_operation.ExtendedOperation):
+            @property
+            def error_message(self):
+                return self._extended_operation.http_error_message
+
+            @property
+            def error_code(self):
+                return self._extended_operation.http_error_status_code
+
+        response = _CustomOperation.make(get_operation, cancel_operation, response)
+
+        # Done; return the response.
+        return response
+
+    def create_snapshot_unary(self,
+            request: Optional[Union[compute.CreateSnapshotDiskRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            disk: Optional[str] = None,
+            snapshot_resource: Optional[compute.Snapshot] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> compute.Operation:
+        r"""Creates a snapshot of a specified persistent disk.
+        For regular snapshot creation, consider using
+        snapshots.insert instead, as that method supports more
+        features, such as creating snapshots in a project
+        different from the source disk project.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_create_snapshot():
+                # Create a client
+                client = compute_v1.DisksClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.CreateSnapshotDiskRequest(
+                    disk="disk_value",
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.create_snapshot(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.CreateSnapshotDiskRequest, dict]):
+                The request object. A request message for
+                Disks.CreateSnapshot. See the method
+                description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone for this
+                request.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            disk (str):
+                Name of the persistent disk to
+                snapshot.
+
+                This corresponds to the ``disk`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            snapshot_resource (google.cloud.compute_v1.types.Snapshot):
+                The body resource for this request
+                This corresponds to the ``snapshot_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw initial operation response.
+                Unlike :meth:`create_snapshot`, this
+                variant does not wrap the result in a
+                poller for the extended long-running
+                operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, disk, snapshot_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.CreateSnapshotDiskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.CreateSnapshotDiskRequest):
+            request = compute.CreateSnapshotDiskRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if project is not None:
+                request.project = project
+            if zone is not None:
+                request.zone = zone
+            if disk is not None:
+                request.disk = disk
+            if snapshot_resource is not None:
+                request.snapshot_resource = snapshot_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_snapshot]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+                ("disk", request.disk),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_snapshot(self,
+            request: Optional[Union[compute.CreateSnapshotDiskRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            disk: Optional[str] = None,
+            snapshot_resource: Optional[compute.Snapshot] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> extended_operation.ExtendedOperation:
+        r"""Creates a snapshot of a specified persistent disk.
+        For regular snapshot creation, consider using
+        snapshots.insert instead, as that method supports more
+        features, such as creating snapshots in a project
+        different from the source disk project.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_create_snapshot():
+                # Create a client
+                client = compute_v1.DisksClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.CreateSnapshotDiskRequest(
+                    disk="disk_value",
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.create_snapshot(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.CreateSnapshotDiskRequest, dict]):
+                The request object. A request message for
+                Disks.CreateSnapshot. See the method
+                description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone for this
+                request.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            disk (str):
+                Name of the persistent disk to
+                snapshot.
+
+                This corresponds to the ``disk`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            snapshot_resource (google.cloud.compute_v1.types.Snapshot):
+                The body resource for this request
+                This corresponds to the ``snapshot_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, disk, snapshot_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.CreateSnapshotDiskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.CreateSnapshotDiskRequest):
+            request = compute.CreateSnapshotDiskRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if project is not None:
+                request.project = project
+            if zone is not None:
+                request.zone = zone
+            if disk is not None:
+                request.disk = disk
+            if snapshot_resource is not None:
+                request.snapshot_resource = snapshot_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_snapshot]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+                ("disk", request.disk),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Build a poller for the returned operation, backed by the zone
+        # operations service.
+        operation_service = self._transport._zone_operations_client
+        operation_request = compute.GetZoneOperationRequest()
+        operation_request.project = request.project
+        operation_request.zone = request.zone
+        operation_request.operation = response.name
+
+        get_operation = functools.partial(operation_service.get, operation_request)
+        # Cancel is not part of extended operations yet.
+        cancel_operation = lambda: None
+
+        # Note: this class is an implementation detail to provide a uniform
+        # set of names for certain fields in the extended operation proto message.
+        # See google.api_core.extended_operation.ExtendedOperation for details
+        # on these properties and the expected interface.
+        class _CustomOperation(extended_operation.ExtendedOperation):
+            @property
+            def error_message(self):
+                return self._extended_operation.http_error_message
+
+            @property
+            def error_code(self):
+                return self._extended_operation.http_error_status_code
+
+        response = _CustomOperation.make(get_operation, cancel_operation, response)
+
+        # Done; return the response.
+        return response
+
+    def delete_unary(self,
+            request: Optional[Union[compute.DeleteDiskRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            disk: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> compute.Operation:
+        r"""Deletes the specified persistent disk. Deleting a
+        disk removes its data permanently and is irreversible.
+        However, deleting a disk does not delete any snapshots
+        previously made from the disk. You must separately
+        delete snapshots.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_delete():
+                # Create a client
+                client = compute_v1.DisksClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.DeleteDiskRequest(
+                    disk="disk_value",
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.delete(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.DeleteDiskRequest, dict]):
+                The request object. A request message for Disks.Delete.
+                See the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone for this
+                request.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            disk (str):
+                Name of the persistent disk to
+                delete.
+
+                This corresponds to the ``disk`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw initial operation response.
+                Unlike :meth:`delete`, this variant does
+                not wrap the result in a poller for the
+                extended long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, disk])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.DeleteDiskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.DeleteDiskRequest):
+            request = compute.DeleteDiskRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if project is not None:
+                request.project = project
+            if zone is not None:
+                request.zone = zone
+            if disk is not None:
+                request.disk = disk
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+                ("disk", request.disk),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete(self,
+            request: Optional[Union[compute.DeleteDiskRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            disk: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> extended_operation.ExtendedOperation:
+        r"""Deletes the specified persistent disk. Deleting a
+        disk removes its data permanently and is irreversible.
+        However, deleting a disk does not delete any snapshots
+        previously made from the disk. You must separately
+        delete snapshots.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_delete():
+                # Create a client
+                client = compute_v1.DisksClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.DeleteDiskRequest(
+                    disk="disk_value",
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.delete(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.DeleteDiskRequest, dict]):
+                The request object. A request message for Disks.Delete.
+                See the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone for this
+                request.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            disk (str):
+                Name of the persistent disk to
+                delete.
+
+                This corresponds to the ``disk`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, disk])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.DeleteDiskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.DeleteDiskRequest):
+            request = compute.DeleteDiskRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if project is not None:
+                request.project = project
+            if zone is not None:
+                request.zone = zone
+            if disk is not None:
+                request.disk = disk
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+                ("disk", request.disk),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Build a poller for the returned operation, backed by the zone
+        # operations service.
+        operation_service = self._transport._zone_operations_client
+        operation_request = compute.GetZoneOperationRequest()
+        operation_request.project = request.project
+        operation_request.zone = request.zone
+        operation_request.operation = response.name
+
+        get_operation = functools.partial(operation_service.get, operation_request)
+        # Cancel is not part of extended operations yet.
+        cancel_operation = lambda: None
+
+        # Note: this class is an implementation detail to provide a uniform
+        # set of names for certain fields in the extended operation proto message.
+        # See google.api_core.extended_operation.ExtendedOperation for details
+        # on these properties and the expected interface.
+        class _CustomOperation(extended_operation.ExtendedOperation):
+            @property
+            def error_message(self):
+                return self._extended_operation.http_error_message
+
+            @property
+            def error_code(self):
+                return self._extended_operation.http_error_status_code
+
+        # Attach the poller (get) and the no-op cancel to the initial response.
+        response = _CustomOperation.make(get_operation, cancel_operation, response)
+
+        # Done; return the response.
+        return response
+
+    def get(self,
+            request: Optional[Union[compute.GetDiskRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            disk: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> compute.Disk:
+        r"""Returns the specified persistent disk.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_get():
+                # Create a client
+                client = compute_v1.DisksClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.GetDiskRequest(
+                    disk="disk_value",
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.get(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.GetDiskRequest, dict]):
+                The request object. A request message for Disks.Get. See
+                the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone for this
+                request.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            disk (str):
+                Name of the persistent disk to
+                return.
+
+                This corresponds to the ``disk`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Disk:
+                Represents a Persistent Disk resource. Google Compute
+                Engine has two Disk resources: \*
+                [Zonal](/compute/docs/reference/rest/v1/disks) \*
+                [Regional](/compute/docs/reference/rest/v1/regionDisks)
+                Persistent disks are required for running your VM
+                instances. Create both boot and non-boot (data)
+                persistent disks. For more information, read Persistent
+                Disks. For more storage options, read Storage options.
+                The disks resource represents a zonal persistent disk.
+                For more information, read Zonal persistent disks. The
+                regionDisks resource represents a regional persistent
+                disk. For more information, read Regional resources.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, disk])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.GetDiskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.GetDiskRequest):
+            request = compute.GetDiskRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if project is not None:
+                request.project = project
+            if zone is not None:
+                request.zone = zone
+            if disk is not None:
+                request.disk = disk
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+                ("disk", request.disk),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_iam_policy(self,
+            request: Optional[Union[compute.GetIamPolicyDiskRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            resource: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> compute.Policy:
+        r"""Gets the access control policy for a resource. May be
+        empty if no such policy or resource exists.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyDiskRequest, dict]): + The request object. A request message for + Disks.GetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). 
A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyDiskRequest): + request = compute.GetIamPolicyDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk_resource: Optional[compute.Disk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a persistent disk in the specified project + using the data in the request. 
You can create a disk + from a source (sourceImage, sourceSnapshot, or + sourceDisk) or create an empty 500 GB data disk by + omitting all properties. You can also create a disk that + is larger than the default size by specifying the sizeGb + property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.InsertDiskRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertDiskRequest, dict]): + The request object. A request message for Disks.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + This corresponds to the ``disk_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertDiskRequest): + request = compute.InsertDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk_resource is not None: + request.disk_resource = disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk_resource: Optional[compute.Disk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a persistent disk in the specified project + using the data in the request. You can create a disk + from a source (sourceImage, sourceSnapshot, or + sourceDisk) or create an empty 500 GB data disk by + omitting all properties. You can also create a disk that + is larger than the default size by specifying the sizeGb + property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.InsertDiskRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertDiskRequest, dict]): + The request object. A request message for Disks.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + This corresponds to the ``disk_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertDiskRequest): + request = compute.InsertDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk_resource is not None: + request.disk_resource = disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListDisksRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of persistent disks contained within + the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.ListDisksRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListDisksRequest, dict]): + The request object. A request message for Disks.List. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.disks.pagers.ListPager: + A list of Disk resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListDisksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListDisksRequest): + request = compute.ListDisksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def remove_resource_policies_unary(self, + request: Optional[Union[compute.RemoveResourcePoliciesDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disks_remove_resource_policies_request_resource: Optional[compute.DisksRemoveResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes resource policies from a disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_resource_policies(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesDiskRequest, dict]): + The request object. A request message for + Disks.RemoveResourcePolicies. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_remove_resource_policies_request_resource (google.cloud.compute_v1.types.DisksRemoveResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``disks_remove_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk, disks_remove_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveResourcePoliciesDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveResourcePoliciesDiskRequest): + request = compute.RemoveResourcePoliciesDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disks_remove_resource_policies_request_resource is not None: + request.disks_remove_resource_policies_request_resource = disks_remove_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_resource_policies(self, + request: Optional[Union[compute.RemoveResourcePoliciesDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disks_remove_resource_policies_request_resource: Optional[compute.DisksRemoveResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes resource policies from a disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_resource_policies(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesDiskRequest, dict]): + The request object. A request message for + Disks.RemoveResourcePolicies. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_remove_resource_policies_request_resource (google.cloud.compute_v1.types.DisksRemoveResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``disks_remove_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk, disks_remove_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveResourcePoliciesDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveResourcePoliciesDiskRequest): + request = compute.RemoveResourcePoliciesDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disks_remove_resource_policies_request_resource is not None: + request.disks_remove_resource_policies_request_resource = disks_remove_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def resize_unary(self, + request: Optional[Union[compute.ResizeDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disks_resize_request_resource: Optional[compute.DisksResizeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Resizes the specified persistent disk. You can only + increase the size of the disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.ResizeDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeDiskRequest, dict]): + The request object. A request message for Disks.Resize. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_resize_request_resource (google.cloud.compute_v1.types.DisksResizeRequest): + The body resource for this request + This corresponds to the ``disks_resize_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk, disks_resize_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeDiskRequest): + request = compute.ResizeDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disks_resize_request_resource is not None: + request.disks_resize_request_resource = disks_resize_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def resize(self, + request: Optional[Union[compute.ResizeDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disks_resize_request_resource: Optional[compute.DisksResizeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Resizes the specified persistent disk. You can only + increase the size of the disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.ResizeDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeDiskRequest, dict]): + The request object. A request message for Disks.Resize. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. 
+ This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_resize_request_resource (google.cloud.compute_v1.types.DisksResizeRequest): + The body resource for this request + This corresponds to the ``disks_resize_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk, disks_resize_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeDiskRequest): + request = compute.ResizeDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disks_resize_request_resource is not None: + request.disks_resize_request_resource = disks_resize_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + zone_set_policy_request_resource: Optional[compute.ZoneSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyDiskRequest, dict]): + The request object. A request message for + Disks.SetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + This corresponds to the ``zone_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, zone_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyDiskRequest): + request = compute.SetIamPolicyDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if zone_set_policy_request_resource is not None: + request.zone_set_policy_request_resource = zone_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + zone_set_labels_request_resource: Optional[compute.ZoneSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on a disk. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsDiskRequest, dict]): + The request object. A request message for + Disks.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_set_labels_request_resource (google.cloud.compute_v1.types.ZoneSetLabelsRequest): + The body resource for this request + This corresponds to the ``zone_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, zone_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsDiskRequest): + request = compute.SetLabelsDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if zone_set_labels_request_resource is not None: + request.zone_set_labels_request_resource = zone_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + zone_set_labels_request_resource: Optional[compute.ZoneSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on a disk. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsDiskRequest, dict]): + The request object. A request message for + Disks.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_set_labels_request_resource (google.cloud.compute_v1.types.ZoneSetLabelsRequest): + The body resource for this request + This corresponds to the ``zone_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, zone_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsDiskRequest): + request = compute.SetLabelsDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if zone_set_labels_request_resource is not None: + request.zone_set_labels_request_resource = zone_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def start_async_replication_unary(self, + request: Optional[Union[compute.StartAsyncReplicationDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disks_start_async_replication_request_resource: Optional[compute.DisksStartAsyncReplicationRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Starts asynchronous replication. Must be invoked on + the primary disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_start_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StartAsyncReplicationDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.start_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StartAsyncReplicationDiskRequest, dict]): + The request object. A request message for + Disks.StartAsyncReplication. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_start_async_replication_request_resource (google.cloud.compute_v1.types.DisksStartAsyncReplicationRequest): + The body resource for this request + This corresponds to the ``disks_start_async_replication_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk, disks_start_async_replication_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StartAsyncReplicationDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StartAsyncReplicationDiskRequest): + request = compute.StartAsyncReplicationDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disks_start_async_replication_request_resource is not None: + request.disks_start_async_replication_request_resource = disks_start_async_replication_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def start_async_replication(self, + request: Optional[Union[compute.StartAsyncReplicationDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disks_start_async_replication_request_resource: Optional[compute.DisksStartAsyncReplicationRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Starts asynchronous replication. Must be invoked on + the primary disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_start_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StartAsyncReplicationDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.start_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StartAsyncReplicationDiskRequest, dict]): + The request object. A request message for + Disks.StartAsyncReplication. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_start_async_replication_request_resource (google.cloud.compute_v1.types.DisksStartAsyncReplicationRequest): + The body resource for this request + This corresponds to the ``disks_start_async_replication_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk, disks_start_async_replication_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StartAsyncReplicationDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StartAsyncReplicationDiskRequest): + request = compute.StartAsyncReplicationDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disks_start_async_replication_request_resource is not None: + request.disks_start_async_replication_request_resource = disks_start_async_replication_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def stop_async_replication_unary(self, + request: Optional[Union[compute.StopAsyncReplicationDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Stops asynchronous replication. Can be invoked either + on the primary or on the secondary disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_stop_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StopAsyncReplicationDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.stop_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StopAsyncReplicationDiskRequest, dict]): + The request object. A request message for + Disks.StopAsyncReplication. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, disk]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StopAsyncReplicationDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StopAsyncReplicationDiskRequest): + request = compute.StopAsyncReplicationDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stop_async_replication(self, + request: Optional[Union[compute.StopAsyncReplicationDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Stops asynchronous replication. Can be invoked either + on the primary or on the secondary disk. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_stop_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StopAsyncReplicationDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.stop_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StopAsyncReplicationDiskRequest, dict]): + The request object. A request message for + Disks.StopAsyncReplication. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StopAsyncReplicationDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StopAsyncReplicationDiskRequest): + request = compute.StopAsyncReplicationDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def stop_group_async_replication_unary(self, + request: Optional[Union[compute.StopGroupAsyncReplicationDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disks_stop_group_async_replication_resource_resource: Optional[compute.DisksStopGroupAsyncReplicationResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Stops asynchronous replication for a consistency + group of disks. Can be invoked either in the primary or + secondary scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_stop_group_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StopGroupAsyncReplicationDiskRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.stop_group_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StopGroupAsyncReplicationDiskRequest, dict]): + The request object. A request message for + Disks.StopGroupAsyncReplication. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. This must be the zone of the + primary or secondary disks in the + consistency group. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_stop_group_async_replication_resource_resource (google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource): + The body resource for this request + This corresponds to the ``disks_stop_group_async_replication_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disks_stop_group_async_replication_resource_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StopGroupAsyncReplicationDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StopGroupAsyncReplicationDiskRequest): + request = compute.StopGroupAsyncReplicationDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disks_stop_group_async_replication_resource_resource is not None: + request.disks_stop_group_async_replication_resource_resource = disks_stop_group_async_replication_resource_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_group_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def stop_group_async_replication(self, + request: Optional[Union[compute.StopGroupAsyncReplicationDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disks_stop_group_async_replication_resource_resource: Optional[compute.DisksStopGroupAsyncReplicationResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Stops asynchronous replication for a consistency + group of disks. Can be invoked either in the primary or + secondary scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_stop_group_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StopGroupAsyncReplicationDiskRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.stop_group_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StopGroupAsyncReplicationDiskRequest, dict]): + The request object. A request message for + Disks.StopGroupAsyncReplication. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
This must be the zone of the + primary or secondary disks in the + consistency group. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_stop_group_async_replication_resource_resource (google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource): + The body resource for this request + This corresponds to the ``disks_stop_group_async_replication_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disks_stop_group_async_replication_resource_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StopGroupAsyncReplicationDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StopGroupAsyncReplicationDiskRequest): + request = compute.StopGroupAsyncReplicationDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disks_stop_group_async_replication_resource_resource is not None: + request.disks_stop_group_async_replication_resource_resource = disks_stop_group_async_replication_resource_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_group_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsDiskRequest, dict]): + The request object. A request message for + Disks.TestIamPermissions. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsDiskRequest): + request = compute.TestIamPermissionsDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disk_resource: Optional[compute.Disk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified disk with the data included in the + request. The update is performed only on selected fields + included as part of update-mask. Only the following fields can + be modified: user_license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateDiskRequest, dict]): + The request object. A request message for Disks.Update. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + This corresponds to the ``disk_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, disk, disk_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateDiskRequest): + request = compute.UpdateDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disk_resource is not None: + request.disk_resource = disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
        return response

    def update(self,
            request: Optional[Union[compute.UpdateDiskRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            disk: Optional[str] = None,
            disk_resource: Optional[compute.Disk] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Updates the specified disk with the data included in the
        request. The update is performed only on selected fields
        included as part of update-mask. Only the following fields can
        be modified: user_license.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_update():
                # Create a client
                client = compute_v1.DisksClient()

                # Initialize request argument(s)
                request = compute_v1.UpdateDiskRequest(
                    disk="disk_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.update(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.UpdateDiskRequest, dict]):
                The request object. A request message for Disks.Update.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.
                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            disk (str):
                The disk name for this request.
                This corresponds to the ``disk`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            disk_resource (google.cloud.compute_v1.types.Disk):
                The body resource for this request
                This corresponds to the ``disk_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        Raises:
            ValueError: If ``request`` is provided together with any of the
                flattened field arguments (``project``, ``zone``, ``disk``,
                ``disk_resource``).
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, disk, disk_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.UpdateDiskRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.UpdateDiskRequest):
            request = compute.UpdateDiskRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if disk is not None:
                request.disk = disk
            if disk_resource is not None:
                request.disk_resource = disk_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("disk", request.disk),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw operation response in an ExtendedOperation that can
        # poll the zone-operations service until the operation is done.
        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def __enter__(self) -> "DisksClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        ..
warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


__all__ = (
    "DisksClient",
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/pagers.py
new file mode 100644
index 000000000..0e5672d8d
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/pagers.py
@@ -0,0 +1,139 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE(review): AsyncIterator/Awaitable below appear unused in this module
# (no async pager is generated for compute) -- confirm against the generator.
from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator

from google.cloud.compute_v1.types import compute


class AggregatedListPager:
    """A pager for iterating through ``aggregated_list`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.compute_v1.types.DiskAggregatedList` object, and
    provides an ``__iter__`` method to iterate through its
    ``items`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``AggregatedList`` requests and continue to iterate
    through the ``items`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.compute_v1.types.DiskAggregatedList`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., compute.DiskAggregatedList],
            request: compute.AggregatedListDisksRequest,
            response: compute.DiskAggregatedList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.AggregatedListDisksRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.DiskAggregatedList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation during paging does not
        # affect the caller's object.
        self._request = compute.AggregatedListDisksRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.DiskAggregatedList]:
        # Lazily fetch each page: re-issue the original method with the
        # next_page_token until the server stops returning one.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[Tuple[str, compute.DisksScopedList]]:
        # ``items`` is a map field (scope name -> DisksScopedList); yield
        # its (key, value) pairs across all pages.
        for page in self.pages:
            yield from page.items.items()

    def get(self, key: str) -> Optional[compute.DisksScopedList]:
        # Lookup in the most recent page only (not across all pages).
        return self._response.items.get(key)

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)


class ListPager:
    """A pager for iterating through ``list`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.compute_v1.types.DiskList` object, and
    provides an ``__iter__`` method to iterate through its
    ``items`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``List`` requests and continue to iterate
    through the ``items`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.compute_v1.types.DiskList`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., compute.DiskList],
            request: compute.ListDisksRequest,
            response: compute.DiskList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListDisksRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.DiskList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not leak to the caller.
        self._request = compute.ListDisksRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.DiskList]:
        # Lazily fetch pages while the server returns a next_page_token.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[compute.Disk]:
        # ``items`` is a repeated field of Disk messages.
        for page in self.pages:
            yield from page.items

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/__init__.py
new file mode 100644
index 000000000..e5291b341
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/__init__.py
@@ -0,0 +1,32 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import DisksTransport
from .rest import DisksRestTransport
from .rest import DisksRestInterceptor


# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[DisksTransport]] +_transport_registry['rest'] = DisksRestTransport + +__all__ = ( + 'DisksTransport', + 'DisksRestTransport', + 'DisksRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/base.py new file mode 100644 index 000000000..309983e14 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/base.py @@ -0,0 +1,401 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import zone_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DisksTransport(abc.ABC): + """Abstract transport class for Disks.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_resource_policies: gapic_v1.method.wrap_method( + self.add_resource_policies, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.bulk_insert: gapic_v1.method.wrap_method( + self.bulk_insert, + default_timeout=None, + client_info=client_info, + ), + self.create_snapshot: gapic_v1.method.wrap_method( + self.create_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.remove_resource_policies: gapic_v1.method.wrap_method( + self.remove_resource_policies, + default_timeout=None, + client_info=client_info, + ), + self.resize: gapic_v1.method.wrap_method( + self.resize, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + self.start_async_replication: gapic_v1.method.wrap_method( + self.start_async_replication, + default_timeout=None, + client_info=client_info, + ), + self.stop_async_replication: 
gapic_v1.method.wrap_method( + self.stop_async_replication, + default_timeout=None, + client_info=client_info, + ), + self.stop_group_async_replication: gapic_v1.method.wrap_method( + self.stop_group_async_replication, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def add_resource_policies(self) -> Callable[ + [compute.AddResourcePoliciesDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListDisksRequest], + Union[ + compute.DiskAggregatedList, + Awaitable[compute.DiskAggregatedList] + ]]: + raise NotImplementedError() + + @property + def bulk_insert(self) -> Callable[ + [compute.BulkInsertDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def create_snapshot(self) -> Callable[ + [compute.CreateSnapshotDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetDiskRequest], + Union[ + compute.Disk, + Awaitable[compute.Disk] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyDiskRequest], + Union[ 
+ compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListDisksRequest], + Union[ + compute.DiskList, + Awaitable[compute.DiskList] + ]]: + raise NotImplementedError() + + @property + def remove_resource_policies(self) -> Callable[ + [compute.RemoveResourcePoliciesDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def resize(self) -> Callable[ + [compute.ResizeDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyDiskRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def start_async_replication(self) -> Callable[ + [compute.StartAsyncReplicationDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def stop_async_replication(self) -> Callable[ + [compute.StopAsyncReplicationDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def stop_group_async_replication(self) -> Callable[ + [compute.StopGroupAsyncReplicationDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsDiskRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + 
        raise NotImplementedError()

    @property
    def update(self) -> Callable[
            [compute.UpdateDiskRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier (e.g. "rest"); supplied by subclasses.
        raise NotImplementedError()

    @property
    def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient:
        # Lazily create, then cache, a ZoneOperationsClient that shares this
        # transport's credentials and transport kind. It is used to poll the
        # status of extended (long-running) operations returned by zonal RPCs.
        ex_op_service = self._extended_operations_services.get("zone_operations")
        if not ex_op_service:
            ex_op_service = zone_operations.ZoneOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["zone_operations"] = ex_op_service

        return ex_op_service


__all__ = (
    'DisksTransport',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/rest.py
new file mode 100644
index 000000000..28f04469c
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/disks/transports/rest.py
@@ -0,0 +1,2542 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import DisksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DisksRestInterceptor: + """Interceptor for Disks. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DisksRestTransport. + + .. 
code-block:: python + class MyCustomDisksInterceptor(DisksRestInterceptor): + def pre_add_resource_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_resource_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_bulk_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_bulk_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_resource_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_resource_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resize(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_resize(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_async_replication(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_async_replication(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop_async_replication(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_async_replication(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop_group_async_replication(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_group_async_replication(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + 
+ def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DisksRestTransport(interceptor=MyCustomDisksInterceptor()) + client = DisksClient(transport=transport) + + + """ + def pre_add_resource_policies(self, request: compute.AddResourcePoliciesDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddResourcePoliciesDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_resource_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_add_resource_policies(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_resource_policies + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_aggregated_list(self, request: compute.AggregatedListDisksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListDisksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.DiskAggregatedList) -> compute.DiskAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. 
+ """ + return response + def pre_bulk_insert(self, request: compute.BulkInsertDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.BulkInsertDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for bulk_insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_bulk_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for bulk_insert + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_create_snapshot(self, request: compute.CreateSnapshotDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.CreateSnapshotDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_create_snapshot(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for create_snapshot + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_get(self, response: compute.Disk) -> compute.Disk: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListDisksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListDisksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_list(self, response: compute.DiskList) -> compute.DiskList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_remove_resource_policies(self, request: compute.RemoveResourcePoliciesDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveResourcePoliciesDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_resource_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_remove_resource_policies(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_resource_policies + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_resize(self, request: compute.ResizeDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ResizeDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resize + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_resize(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for resize + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_start_async_replication(self, request: compute.StartAsyncReplicationDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StartAsyncReplicationDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_async_replication + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. 
+ """ + return request, metadata + + def post_start_async_replication(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for start_async_replication + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_stop_async_replication(self, request: compute.StopAsyncReplicationDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StopAsyncReplicationDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_async_replication + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_stop_async_replication(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for stop_async_replication + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_stop_group_async_replication(self, request: compute.StopGroupAsyncReplicationDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StopGroupAsyncReplicationDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_group_async_replication + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_stop_group_async_replication(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for stop_group_async_replication + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. 
+ """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the Disks server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DisksRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DisksRestInterceptor + + +class DisksRestTransport(DisksTransport): + """REST backend transport for Disks. + + The Disks API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[DisksRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DisksRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddResourcePolicies(DisksRestStub): + def __hash__(self): + return hash("AddResourcePolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddResourcePoliciesDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add resource policies method over HTTP. + + Args: + request (~.compute.AddResourcePoliciesDiskRequest): + The request object. A request message for + Disks.AddResourcePolicies. 
See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/addResourcePolicies', + 'body': 'disks_add_resource_policies_request_resource', + }, + ] + request, metadata = self._interceptor.pre_add_resource_policies(request, metadata) + pb_request = compute.AddResourcePoliciesDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = 
getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_resource_policies(resp) + return resp + + class _AggregatedList(DisksRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListDisksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DiskAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListDisksRequest): + The request object. A request message for + Disks.AggregatedList. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.DiskAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/disks', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListDisksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.DiskAggregatedList() + pb_resp = compute.DiskAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _BulkInsert(DisksRestStub): + def __hash__(self): + return hash("BulkInsert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.BulkInsertDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the bulk insert method over HTTP. + + Args: + request (~.compute.BulkInsertDiskRequest): + The request object. A request message for + Disks.BulkInsert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/bulkInsert', + 'body': 'bulk_insert_disk_resource_resource', + }, + ] + request, metadata = self._interceptor.pre_bulk_insert(request, metadata) + pb_request = compute.BulkInsertDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_bulk_insert(resp) + return resp + + class _CreateSnapshot(DisksRestStub): + def __hash__(self): + return hash("CreateSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.CreateSnapshotDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the create snapshot method over HTTP. + + Args: + request (~.compute.CreateSnapshotDiskRequest): + The request object. A request message for + Disks.CreateSnapshot. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/createSnapshot', + 'body': 'snapshot_resource', + }, + ] + request, metadata = self._interceptor.pre_create_snapshot(request, metadata) + pb_request = compute.CreateSnapshotDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_snapshot(resp) + return resp + + class _Delete(DisksRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteDiskRequest): + The request object. A request message for Disks.Delete. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(DisksRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Disk: + r"""Call the get method over HTTP. 
+ + Args: + request (~.compute.GetDiskRequest): + The request object. A request message for Disks.Get. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Disk: + Represents a Persistent Disk resource. Google Compute + Engine has two Disk resources: \* + `Zonal `__ \* + `Regional `__ + Persistent disks are required for running your VM + instances. Create both boot and non-boot (data) + persistent disks. For more information, read Persistent + Disks. For more storage options, read Storage options. + The disks resource represents a zonal persistent disk. + For more information, read Zonal persistent disks. The + regionDisks resource represents a regional persistent + disk. For more information, read Regional resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case 
of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Disk() + pb_resp = compute.Disk.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(DisksRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyDiskRequest): + The request object. A request message for + Disks.GetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(DisksRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertDiskRequest): + The request object. A request message for Disks.Insert. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks', + 'body': 'disk_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(DisksRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListDisksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DiskList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListDisksRequest): + The request object. A request message for Disks.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.DiskList: + A list of Disk resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListDisksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.DiskList() + pb_resp = compute.DiskList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _RemoveResourcePolicies(DisksRestStub): + def __hash__(self): + return hash("RemoveResourcePolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveResourcePoliciesDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove resource policies method over HTTP. + + Args: + request (~.compute.RemoveResourcePoliciesDiskRequest): + The request object. A request message for + Disks.RemoveResourcePolicies. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/removeResourcePolicies', + 'body': 'disks_remove_resource_policies_request_resource', + }, + ] + request, metadata = self._interceptor.pre_remove_resource_policies(request, metadata) + pb_request = compute.RemoveResourcePoliciesDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_resource_policies(resp) + return resp + + class _Resize(DisksRestStub): + def __hash__(self): + return hash("Resize") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ResizeDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the resize method over HTTP. + + Args: + request (~.compute.ResizeDiskRequest): + The request object. A request message for Disks.Resize. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/resize', + 'body': 'disks_resize_request_resource', + }, + ] + request, metadata = self._interceptor.pre_resize(request, metadata) + pb_request = compute.ResizeDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resize(resp) + return resp + + class _SetIamPolicy(DisksRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyDiskRequest): + The request object. A request message for + Disks.SetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setIamPolicy', + 'body': 'zone_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _SetLabels(DisksRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsDiskRequest): + The request object. A request message for + Disks.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setLabels', + 'body': 'zone_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _StartAsyncReplication(DisksRestStub): + def __hash__(self): + return hash("StartAsyncReplication") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StartAsyncReplicationDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the start async replication method over HTTP. + + Args: + request (~.compute.StartAsyncReplicationDiskRequest): + The request object. A request message for + Disks.StartAsyncReplication. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/startAsyncReplication', + 'body': 'disks_start_async_replication_request_resource', + }, + ] + request, metadata = self._interceptor.pre_start_async_replication(request, metadata) + pb_request = compute.StartAsyncReplicationDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_async_replication(resp) + return resp + + class _StopAsyncReplication(DisksRestStub): + def __hash__(self): + return hash("StopAsyncReplication") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StopAsyncReplicationDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the stop async replication method over HTTP. + + Args: + request (~.compute.StopAsyncReplicationDiskRequest): + The request object. A request message for + Disks.StopAsyncReplication. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/stopAsyncReplication', + }, + ] + request, metadata = self._interceptor.pre_stop_async_replication(request, metadata) + pb_request = compute.StopAsyncReplicationDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_async_replication(resp) + return resp + + class _StopGroupAsyncReplication(DisksRestStub): + def __hash__(self): + return hash("StopGroupAsyncReplication") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StopGroupAsyncReplicationDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the stop group async + replication method over HTTP. + + Args: + request (~.compute.StopGroupAsyncReplicationDiskRequest): + The request object. A request message for + Disks.StopGroupAsyncReplication. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/stopGroupAsyncReplication', + 'body': 'disks_stop_group_async_replication_resource_resource', + }, + ] + request, metadata = self._interceptor.pre_stop_group_async_replication(request, metadata) + pb_request = compute.StopGroupAsyncReplicationDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_group_async_replication(resp) + return resp + + class _TestIamPermissions(DisksRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsDiskRequest): + The request object. A request message for + Disks.TestIamPermissions. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _Update(DisksRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateDiskRequest): + The request object. A request message for Disks.Update. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/disks/{disk}', + 'body': 'disk_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def add_resource_policies(self) -> Callable[ + [compute.AddResourcePoliciesDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AddResourcePolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListDisksRequest], + compute.DiskAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def bulk_insert(self) -> Callable[ + [compute.BulkInsertDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BulkInsert(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_snapshot(self) -> Callable[ + [compute.CreateSnapshotDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetDiskRequest], + compute.Disk]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyDiskRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListDisksRequest], + compute.DiskList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_resource_policies(self) -> Callable[ + [compute.RemoveResourcePoliciesDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveResourcePolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def resize(self) -> Callable[ + [compute.ResizeDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Resize(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyDiskRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_async_replication(self) -> Callable[ + [compute.StartAsyncReplicationDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartAsyncReplication(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_async_replication(self) -> Callable[ + [compute.StopAsyncReplicationDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StopAsyncReplication(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_group_async_replication(self) -> Callable[ + [compute.StopGroupAsyncReplicationDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._StopGroupAsyncReplication(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsDiskRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DisksRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/__init__.py new file mode 100644 index 000000000..7c5add9c4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import ExternalVpnGatewaysClient + +__all__ = ( + 'ExternalVpnGatewaysClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/client.py new file mode 100644 index 000000000..d1b388c17 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/client.py @@ -0,0 +1,1510 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.external_vpn_gateways import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import ExternalVpnGatewaysTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ExternalVpnGatewaysRestTransport + + +class ExternalVpnGatewaysClientMeta(type): + """Metaclass for the ExternalVpnGateways client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ExternalVpnGatewaysTransport]] + _transport_registry["rest"] = ExternalVpnGatewaysRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ExternalVpnGatewaysTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ExternalVpnGatewaysClient(metaclass=ExternalVpnGatewaysClientMeta): + """The ExternalVpnGateways API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExternalVpnGatewaysClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExternalVpnGatewaysClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ExternalVpnGatewaysTransport: + """Returns the transport used by the client instance. + + Returns: + ExternalVpnGatewaysTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ExternalVpnGatewaysTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the external vpn gateways client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ExternalVpnGatewaysTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ExternalVpnGatewaysTransport): + # transport is a ExternalVpnGatewaysTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteExternalVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + external_vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified externalVpnGateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteExternalVpnGatewayRequest( + external_vpn_gateway="external_vpn_gateway_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteExternalVpnGatewayRequest, dict]): + The request object. A request message for + ExternalVpnGateways.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_vpn_gateway (str): + Name of the externalVpnGateways to + delete. + + This corresponds to the ``external_vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, external_vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteExternalVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteExternalVpnGatewayRequest): + request = compute.DeleteExternalVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if external_vpn_gateway is not None: + request.external_vpn_gateway = external_vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("external_vpn_gateway", request.external_vpn_gateway), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteExternalVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + external_vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified externalVpnGateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteExternalVpnGatewayRequest( + external_vpn_gateway="external_vpn_gateway_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteExternalVpnGatewayRequest, dict]): + The request object. A request message for + ExternalVpnGateways.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_vpn_gateway (str): + Name of the externalVpnGateways to + delete. + + This corresponds to the ``external_vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, external_vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteExternalVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteExternalVpnGatewayRequest): + request = compute.DeleteExternalVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if external_vpn_gateway is not None: + request.external_vpn_gateway = external_vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("external_vpn_gateway", request.external_vpn_gateway), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetExternalVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + external_vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.ExternalVpnGateway: + r"""Returns the specified externalVpnGateway. Get a list + of available externalVpnGateways by making a list() + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.GetExternalVpnGatewayRequest( + external_vpn_gateway="external_vpn_gateway_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetExternalVpnGatewayRequest, dict]): + The request object. 
A request message for + ExternalVpnGateways.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_vpn_gateway (str): + Name of the externalVpnGateway to + return. + + This corresponds to the ``external_vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.ExternalVpnGateway: + Represents an external VPN gateway. + External VPN gateway is the on-premises + VPN gateway(s) or another cloud + provider's VPN gateway that connects to + your Google Cloud VPN gateway. To create + a highly available VPN from Google Cloud + Platform to your VPN gateway or another + cloud provider's VPN gateway, you must + create a external VPN gateway resource + with information about the other + gateway. For more information about + using external VPN gateways, see + Creating an HA VPN gateway and tunnel + pair to a peer VPN. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, external_vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetExternalVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetExternalVpnGatewayRequest): + request = compute.GetExternalVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if external_vpn_gateway is not None: + request.external_vpn_gateway = external_vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("external_vpn_gateway", request.external_vpn_gateway), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertExternalVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + external_vpn_gateway_resource: Optional[compute.ExternalVpnGateway] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a ExternalVpnGateway in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertExternalVpnGatewayRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertExternalVpnGatewayRequest, dict]): + The request object. A request message for + ExternalVpnGateways.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_vpn_gateway_resource (google.cloud.compute_v1.types.ExternalVpnGateway): + The body resource for this request + This corresponds to the ``external_vpn_gateway_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, external_vpn_gateway_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertExternalVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertExternalVpnGatewayRequest): + request = compute.InsertExternalVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if external_vpn_gateway_resource is not None: + request.external_vpn_gateway_resource = external_vpn_gateway_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertExternalVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + external_vpn_gateway_resource: Optional[compute.ExternalVpnGateway] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a ExternalVpnGateway in the specified project + using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertExternalVpnGatewayRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertExternalVpnGatewayRequest, dict]): + The request object. A request message for + ExternalVpnGateways.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + external_vpn_gateway_resource (google.cloud.compute_v1.types.ExternalVpnGateway): + The body resource for this request + This corresponds to the ``external_vpn_gateway_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, external_vpn_gateway_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertExternalVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertExternalVpnGatewayRequest): + request = compute.InsertExternalVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if external_vpn_gateway_resource is not None: + request.external_vpn_gateway_resource = external_vpn_gateway_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListExternalVpnGatewaysRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of ExternalVpnGateway available to + the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.ListExternalVpnGatewaysRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListExternalVpnGatewaysRequest, dict]): + The request object. A request message for + ExternalVpnGateways.List. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.external_vpn_gateways.pagers.ListPager: + Response to the list request, and + contains a list of externalVpnGateways. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListExternalVpnGatewaysRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListExternalVpnGatewaysRequest): + request = compute.ListExternalVpnGatewaysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsExternalVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on an ExternalVpnGateway. To learn + more about labels, read the Labeling Resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsExternalVpnGatewayRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsExternalVpnGatewayRequest, dict]): + The request object. A request message for + ExternalVpnGateways.SetLabels. See the + method description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsExternalVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsExternalVpnGatewayRequest): + request = compute.SetLabelsExternalVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsExternalVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on an ExternalVpnGateway. To learn + more about labels, read the Labeling Resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsExternalVpnGatewayRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsExternalVpnGatewayRequest, dict]): + The request object. A request message for + ExternalVpnGateways.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsExternalVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsExternalVpnGatewayRequest): + request = compute.SetLabelsExternalVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsExternalVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsExternalVpnGatewayRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsExternalVpnGatewayRequest, dict]): + The request object. A request message for + ExternalVpnGateways.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsExternalVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsExternalVpnGatewayRequest): + request = compute.TestIamPermissionsExternalVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ExternalVpnGatewaysClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ExternalVpnGatewaysClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/pagers.py new file mode 100644 index 000000000..3565639e6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ExternalVpnGatewayList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ExternalVpnGatewayList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ExternalVpnGatewayList], + request: compute.ListExternalVpnGatewaysRequest, + response: compute.ExternalVpnGatewayList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListExternalVpnGatewaysRequest): + The initial request object. + response (google.cloud.compute_v1.types.ExternalVpnGatewayList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListExternalVpnGatewaysRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ExternalVpnGatewayList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.ExternalVpnGateway]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/__init__.py new file mode 100644 index 000000000..0655d7d4e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ExternalVpnGatewaysTransport +from .rest import ExternalVpnGatewaysRestTransport +from .rest import ExternalVpnGatewaysRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ExternalVpnGatewaysTransport]] +_transport_registry['rest'] = ExternalVpnGatewaysRestTransport + +__all__ = ( + 'ExternalVpnGatewaysTransport', + 'ExternalVpnGatewaysRestTransport', + 'ExternalVpnGatewaysRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/base.py new file mode 100644 index 000000000..9a6a04e29 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ExternalVpnGatewaysTransport(abc.ABC): + """Abstract transport class for ExternalVpnGateways.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteExternalVpnGatewayRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetExternalVpnGatewayRequest], + Union[ + compute.ExternalVpnGateway, + Awaitable[compute.ExternalVpnGateway] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertExternalVpnGatewayRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListExternalVpnGatewaysRequest], + Union[ + compute.ExternalVpnGatewayList, + Awaitable[compute.ExternalVpnGatewayList] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsExternalVpnGatewayRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsExternalVpnGatewayRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'ExternalVpnGatewaysTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py new file mode 100644 index 000000000..7e3178216 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py @@ -0,0 +1,933 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ExternalVpnGatewaysTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + 
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ExternalVpnGatewaysRestInterceptor: + """Interceptor for ExternalVpnGateways. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ExternalVpnGatewaysRestTransport. + + .. code-block:: python + class MyCustomExternalVpnGatewaysInterceptor(ExternalVpnGatewaysRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ExternalVpnGatewaysRestTransport(interceptor=MyCustomExternalVpnGatewaysInterceptor()) + client = ExternalVpnGatewaysClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteExternalVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteExternalVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the ExternalVpnGateways server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the ExternalVpnGateways server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetExternalVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetExternalVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the ExternalVpnGateways server. + """ + return request, metadata + + def post_get(self, response: compute.ExternalVpnGateway) -> compute.ExternalVpnGateway: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the ExternalVpnGateways server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertExternalVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertExternalVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the ExternalVpnGateways server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the ExternalVpnGateways server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListExternalVpnGatewaysRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListExternalVpnGatewaysRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ExternalVpnGateways server. + """ + return request, metadata + + def post_list(self, response: compute.ExternalVpnGatewayList) -> compute.ExternalVpnGatewayList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the ExternalVpnGateways server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsExternalVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsExternalVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the ExternalVpnGateways server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the ExternalVpnGateways server but before + it is returned to user code. 
+ """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsExternalVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsExternalVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ExternalVpnGateways server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the ExternalVpnGateways server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ExternalVpnGatewaysRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ExternalVpnGatewaysRestInterceptor + + +class ExternalVpnGatewaysRestTransport(ExternalVpnGatewaysTransport): + """REST backend transport for ExternalVpnGateways. + + The ExternalVpnGateways API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ExternalVpnGatewaysRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ExternalVpnGatewaysRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(ExternalVpnGatewaysRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteExternalVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteExternalVpnGatewayRequest): + The request object. A request message for + ExternalVpnGateways.Delete.
See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteExternalVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the 
appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            # Deserialize the HTTP body into the raw protobuf backing the
            # proto-plus wrapper, then run the post-RPC interceptor hook.
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_delete(resp)
            return resp

    class _Get(ExternalVpnGatewaysRestStub):
        """REST stub for ExternalVpnGateways.Get: fetch one ExternalVpnGateway over HTTP GET."""

        def __hash__(self):
            # Stable per-RPC hash; lets the transport cache stub instances by name.
            return hash("Get")

        # Required request fields that must always be present in the query
        # string, mapped to their default values.  Empty for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Defaults for required fields the caller left unset in the request.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetExternalVpnGatewayRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.ExternalVpnGateway:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetExternalVpnGatewayRequest):
                    The request object. A request message for
                    ExternalVpnGateways.Get. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.ExternalVpnGateway:
                    Represents an external VPN gateway: the on-premises VPN
                    gateway(s) or another cloud provider's VPN gateway that
                    connects to your Google Cloud VPN gateway.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}',
            },
            ]
            # Pre-RPC interceptor hook, then transcode the raw protobuf onto
            # the URI template above (path params filled from the request).
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetExternalVpnGatewayRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            # Parse the HTTP body into the raw protobuf, then run the
            # post-RPC interceptor hook.
            resp = compute.ExternalVpnGateway()
            pb_resp = compute.ExternalVpnGateway.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp

    class _Insert(ExternalVpnGatewaysRestStub):
        """REST stub for ExternalVpnGateways.Insert: create a gateway via HTTP POST; returns an Operation."""

        def __hash__(self):
            return hash("Insert")

        # Required fields that must always be sent, with their defaults; none here.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.InsertExternalVpnGatewayRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the insert method over HTTP.

            Args:
                request (~.compute.InsertExternalVpnGatewayRequest):
                    The request object. A request message for
                    ExternalVpnGateways.Insert. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents a Google Compute Engine Operation resource
                    (global, regional or zonal), used to manage asynchronous
                    API requests.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/externalVpnGateways',
                'body': 'external_vpn_gateway_resource',
            },
            ]
            request, metadata = self._interceptor.pre_insert(request, metadata)
            pb_request = compute.InsertExternalVpnGatewayRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_insert(resp)
            return resp

    class _List(ExternalVpnGatewaysRestStub):
        """REST stub for ExternalVpnGateways.List: list gateways in a project over HTTP GET."""

        def __hash__(self):
            return hash("List")

        # No required fields need server-side defaults for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.ListExternalVpnGatewaysRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.ExternalVpnGatewayList:
            r"""Call the list method over HTTP.

            Args:
                request (~.compute.ListExternalVpnGatewaysRequest):
                    The request object. A request message for
                    ExternalVpnGateways.List. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.ExternalVpnGatewayList:
                    Response to the list request; contains
                    a list of externalVpnGateways.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/global/externalVpnGateways',
            },
            ]
            request, metadata = self._interceptor.pre_list(request, metadata)
            pb_request = compute.ListExternalVpnGatewaysRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.ExternalVpnGatewayList()
            pb_resp = compute.ExternalVpnGatewayList.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list(resp)
            return resp

    class _SetLabels(ExternalVpnGatewaysRestStub):
        """REST stub for ExternalVpnGateways.SetLabels: replace a gateway's labels via HTTP POST; returns an Operation."""

        def __hash__(self):
            return hash("SetLabels")

        # No required fields need server-side defaults for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.SetLabelsExternalVpnGatewayRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the set labels method over HTTP.

            Args:
                request (~.compute.SetLabelsExternalVpnGatewayRequest):
                    The request object. A request message for
                    ExternalVpnGateways.SetLabels. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents a Google Compute Engine Operation resource
                    (global, regional or zonal), used to manage asynchronous
                    API requests.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/setLabels',
                'body': 'global_set_labels_request_resource',
            },
            ]
            request, metadata = self._interceptor.pre_set_labels(request, metadata)
            pb_request = compute.SetLabelsExternalVpnGatewayRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_set_labels(resp)
            return resp

    class _TestIamPermissions(ExternalVpnGatewaysRestStub):
        """REST stub for ExternalVpnGateways.TestIamPermissions: check caller permissions via HTTP POST."""

        def __hash__(self):
            return hash("TestIamPermissions")

        # No required fields need server-side defaults for this RPC.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.TestIamPermissionsExternalVpnGatewayRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.TestPermissionsResponse:
            r"""Call the test iam permissions method over HTTP.

            Args:
                request (~.compute.TestIamPermissionsExternalVpnGatewayRequest):
                    The request object. A request message for
                    ExternalVpnGateways.TestIamPermissions.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.TestPermissionsResponse:
                    The subset of the requested permissions
                    that the caller holds on the resource.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/testIamPermissions',
                'body': 'test_permissions_request_resource',
            },
            ]
            request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata)
            pb_request = compute.TestIamPermissionsExternalVpnGatewayRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.TestPermissionsResponse()
            pb_resp = compute.TestPermissionsResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_test_iam_permissions(resp)
            return resp

    # Public per-RPC callables.  Each property returns a stub instance bound
    # to this transport's HTTP session, host and interceptor.
    @property
    def delete(self) -> Callable[
            [compute.DeleteExternalVpnGatewayRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Delete(self._session, self._host, self._interceptor) # type: ignore

    @property
    def get(self) -> Callable[
            [compute.GetExternalVpnGatewayRequest],
            compute.ExternalVpnGateway]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor) # type: ignore

    @property
    def insert(self) -> Callable[
            [compute.InsertExternalVpnGatewayRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Insert(self._session, self._host, self._interceptor) # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListExternalVpnGatewaysRequest],
            compute.ExternalVpnGatewayList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor) # type: ignore

    @property
    def set_labels(self) -> Callable[
            [compute.SetLabelsExternalVpnGatewayRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+ # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsExternalVpnGatewayRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ExternalVpnGatewaysRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/__init__.py new file mode 100644 index 000000000..36df9407b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import FirewallPoliciesClient + +__all__ = ( + 'FirewallPoliciesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/client.py new file mode 100644 index 000000000..07757f72b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/client.py @@ -0,0 +1,3629 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
from collections import OrderedDict
import functools
import os
import re
from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast

from google.cloud.compute_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

# Older versions of google-api-core do not expose _MethodDefault; fall back
# to a plain sentinel alias there.
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.api_core import extended_operation  # type: ignore
from google.cloud.compute_v1.services.firewall_policies import pagers
from google.cloud.compute_v1.types import compute
from .transports.base import FirewallPoliciesTransport, DEFAULT_CLIENT_INFO
from .transports.rest import FirewallPoliciesRestTransport


class FirewallPoliciesClientMeta(type):
    """Metaclass for the FirewallPolicies client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    # Registry of available transports, keyed by label; insertion order
    # determines the default (first entry).
    _transport_registry = OrderedDict()  # type: Dict[str, Type[FirewallPoliciesTransport]]
    _transport_registry["rest"] = FirewallPoliciesRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[FirewallPoliciesTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class FirewallPoliciesClient(metaclass=FirewallPoliciesClientMeta):
    """The FirewallPolicies API."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # NOTE(review): named groups reconstructed from the unpack below and
        # the GAPIC generator template; verify against upstream generator.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "compute.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            FirewallPoliciesClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirewallPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FirewallPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + FirewallPoliciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]:
        """Parse a location path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}

    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` if provided, use the provided one.
        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Validate the two environment switches before acting on them.
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, FirewallPoliciesTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the firewall policies client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, FirewallPoliciesTransport]): The
                transport to use.
 If set to None, a transport is chosen
                automatically.
                NOTE: "rest" transport functionality is currently in a
                beta state (preview). We welcome your feedback via an
                issue in this library's source repository.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a dict, None, or ClientOptions.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        # An API key and explicit credentials cannot both be used.
        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, FirewallPoliciesTransport):
            # transport is a FirewallPoliciesTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            # Exchange an API key for credentials when the installed
            # google-auth supports it.
            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def add_association_unary(self,
            request: Optional[Union[compute.AddAssociationFirewallPolicyRequest, dict]] = None,
            *,
            firewall_policy: Optional[str] = None,
            firewall_policy_association_resource: Optional[compute.FirewallPolicyAssociation] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Inserts an association for the specified firewall
        policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_association(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddAssociationFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.add_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddAssociationFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.AddAssociation. See the + method description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation): + The body resource for this request + This corresponds to the ``firewall_policy_association_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([firewall_policy, firewall_policy_association_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddAssociationFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddAssociationFirewallPolicyRequest): + request = compute.AddAssociationFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_association_resource is not None: + request.firewall_policy_association_resource = firewall_policy_association_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_association(self, + request: Optional[Union[compute.AddAssociationFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + firewall_policy_association_resource: Optional[compute.FirewallPolicyAssociation] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Inserts an association for the specified firewall + policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_association(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddAssociationFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.add_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddAssociationFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.AddAssociation. See the + method description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation): + The body resource for this request + This corresponds to the ``firewall_policy_association_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy, firewall_policy_association_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddAssociationFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddAssociationFirewallPolicyRequest): + request = compute.AddAssociationFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_association_resource is not None: + request.firewall_policy_association_resource = firewall_policy_association_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.add_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def add_rule_unary(self, + request: Optional[Union[compute.AddRuleFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Inserts a rule into a firewall policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_rule(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddRuleFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.AddRule. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddRuleFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddRuleFirewallPolicyRequest): + request = compute.AddRuleFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_rule(self, + request: Optional[Union[compute.AddRuleFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Inserts a rule into a firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_rule(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddRuleFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.AddRule. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddRuleFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddRuleFirewallPolicyRequest): + request = compute.AddRuleFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def clone_rules_unary(self, + request: Optional[Union[compute.CloneRulesFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Copies rules to the specified firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_clone_rules(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.CloneRulesFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.clone_rules(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CloneRulesFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.CloneRules. See the + method description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CloneRulesFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.CloneRulesFirewallPolicyRequest): + request = compute.CloneRulesFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.clone_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def clone_rules(self, + request: Optional[Union[compute.CloneRulesFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Copies rules to the specified firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_clone_rules(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.CloneRulesFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.clone_rules(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CloneRulesFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.CloneRules. See the + method description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CloneRulesFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.CloneRulesFirewallPolicyRequest): + request = compute.CloneRulesFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.clone_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Delete. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to + delete. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteFirewallPolicyRequest): + request = compute.DeleteFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Delete. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to + delete. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteFirewallPolicyRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteFirewallPolicyRequest): + request = compute.DeleteFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicy: + r"""Returns the specified firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Get. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to get. + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicy: + Represents a Firewall Policy + resource. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetFirewallPolicyRequest): + request = compute.GetFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_association(self, + request: Optional[Union[compute.GetAssociationFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicyAssociation: + r"""Gets an association with the specified name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_association(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetAssociationFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.get_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetAssociationFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.GetAssociation. See the + method description for details. + firewall_policy (str): + Name of the firewall policy to which + the queried rule belongs. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicyAssociation: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetAssociationFirewallPolicyRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetAssociationFirewallPolicyRequest): + request = compute.GetAssociationFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyFirewallPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyFirewallPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.GetIamPolicy. See the + method description for details. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. 
A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyFirewallPolicyRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyFirewallPolicyRequest): + request = compute.GetIamPolicyFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_rule(self, + request: Optional[Union[compute.GetRuleFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicyRule: + r"""Gets a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_rule(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRuleFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.GetRule. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to which + the queried rule belongs. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRuleFirewallPolicyRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRuleFirewallPolicyRequest): + request = compute.GetRuleFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertFirewallPolicyRequest, dict]] = None, + *, + parent_id: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertFirewallPolicyRequest( + parent_id="parent_id_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Insert. See the method + description for details. + parent_id (str): + Parent ID for this request. The ID can be either be + "folders/[FOLDER_ID]" if the parent is a folder or + "organizations/[ORGANIZATION_ID]" if the parent is an + organization. + + This corresponds to the ``parent_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent_id, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertFirewallPolicyRequest): + request = compute.InsertFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent_id is not None: + request.parent_id = parent_id + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertFirewallPolicyRequest, dict]] = None, + *, + parent_id: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertFirewallPolicyRequest( + parent_id="parent_id_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Insert. See the method + description for details. + parent_id (str): + Parent ID for this request. The ID can be either be + "folders/[FOLDER_ID]" if the parent is a folder or + "organizations/[ORGANIZATION_ID]" if the parent is an + organization. + + This corresponds to the ``parent_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent_id, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertFirewallPolicyRequest): + request = compute.InsertFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent_id is not None: + request.parent_id = parent_id + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.parent_id = request.parent_id + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListFirewallPoliciesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists all the policies that have been configured for + the specified folder or organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListFirewallPoliciesRequest( + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListFirewallPoliciesRequest, dict]): + The request object. A request message for + FirewallPolicies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.firewall_policies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListFirewallPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListFirewallPoliciesRequest): + request = compute.ListFirewallPoliciesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_associations(self, + request: Optional[Union[compute.ListAssociationsFirewallPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPoliciesListAssociationsResponse: + r"""Lists associations of a specified target, i.e., + organization or folder. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_associations(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListAssociationsFirewallPolicyRequest( + ) + + # Make the request + response = client.list_associations(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListAssociationsFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.ListAssociations. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPoliciesListAssociationsResponse: + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListAssociationsFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListAssociationsFirewallPolicyRequest): + request = compute.ListAssociationsFirewallPolicyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_associations] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def move_unary(self, + request: Optional[Union[compute.MoveFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + parent_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Moves the specified firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.MoveFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + parent_id="parent_id_value", + ) + + # Make the request + response = client.move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Move. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent_id (str): + The new parent of the firewall policy. The ID can be + either be "folders/[FOLDER_ID]" if the parent is a + folder or "organizations/[ORGANIZATION_ID]" if the + parent is an organization. 
+ + This corresponds to the ``parent_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy, parent_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.MoveFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.MoveFirewallPolicyRequest): + request = compute.MoveFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if parent_id is not None: + request.parent_id = parent_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def move(self, + request: Optional[Union[compute.MoveFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + parent_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Moves the specified firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.MoveFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + parent_id="parent_id_value", + ) + + # Make the request + response = client.move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Move. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent_id (str): + The new parent of the firewall policy. 
The ID can be + either be "folders/[FOLDER_ID]" if the parent is a + folder or "organizations/[ORGANIZATION_ID]" if the + parent is an organization. + + This corresponds to the ``parent_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy, parent_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.MoveFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.MoveFirewallPolicyRequest): + request = compute.MoveFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if parent_id is not None: + request.parent_id = parent_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.parent_id = request.parent_id + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified policy with the data included + in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Patch. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([firewall_policy, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchFirewallPolicyRequest): + request = compute.PatchFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified policy with the data included + in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.Patch. See the method + description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchFirewallPolicyRequest): + request = compute.PatchFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
    def patch_rule_unary(self,
            request: Optional[Union[compute.PatchRuleFirewallPolicyRequest, dict]] = None,
            *,
            firewall_policy: Optional[str] = None,
            firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Patches a rule of the specified priority.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch_rule():
                # Create a client
                client = compute_v1.FirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.PatchRuleFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                )

                # Make the request
                response = client.patch_rule(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchRuleFirewallPolicyRequest, dict]):
                The request object. A request message for
                FirewallPolicies.PatchRule. See the
                method description for details.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule):
                The body resource for this request.
                This corresponds to the ``firewall_policy_rule_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation proto returned by the API. Unlike
                :meth:`patch_rule`, this ``_unary`` variant does not wrap
                the result in an ``ExtendedOperation``; callers must poll
                the returned operation themselves to track completion.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([firewall_policy, firewall_policy_rule_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchRuleFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchRuleFirewallPolicyRequest):
            request = compute.PatchRuleFirewallPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if firewall_policy is not None:
                request.firewall_policy = firewall_policy
            if firewall_policy_rule_resource is not None:
                request.firewall_policy_rule_resource = firewall_policy_rule_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch_rule]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_rule(self, + request: Optional[Union[compute.PatchRuleFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_rule(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRuleFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.PatchRule. See the + method description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRuleFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRuleFirewallPolicyRequest): + request = compute.PatchRuleFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
    def remove_association_unary(self,
            request: Optional[Union[compute.RemoveAssociationFirewallPolicyRequest, dict]] = None,
            *,
            firewall_policy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Removes an association for the specified firewall
        policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_remove_association():
                # Create a client
                client = compute_v1.FirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.RemoveAssociationFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                )

                # Make the request
                response = client.remove_association(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.RemoveAssociationFirewallPolicyRequest, dict]):
                The request object. A request message for
                FirewallPolicies.RemoveAssociation. See
                the method description for details.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation proto returned by the API. Unlike
                :meth:`remove_association`, this ``_unary`` variant does
                not wrap the result in an ``ExtendedOperation``; callers
                must poll the returned operation themselves to track
                completion.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([firewall_policy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.RemoveAssociationFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.RemoveAssociationFirewallPolicyRequest):
            request = compute.RemoveAssociationFirewallPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if firewall_policy is not None:
                request.firewall_policy = firewall_policy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.remove_association]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_association(self, + request: Optional[Union[compute.RemoveAssociationFirewallPolicyRequest, dict]] = None, + *, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes an association for the specified firewall + policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_association(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveAssociationFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.remove_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveAssociationFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.RemoveAssociation. See + the method description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveAssociationFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveAssociationFirewallPolicyRequest): + request = compute.RemoveAssociationFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. 
    def remove_rule_unary(self,
            request: Optional[Union[compute.RemoveRuleFirewallPolicyRequest, dict]] = None,
            *,
            firewall_policy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Deletes a rule of the specified priority.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_remove_rule():
                # Create a client
                client = compute_v1.FirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.RemoveRuleFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                )

                # Make the request
                response = client.remove_rule(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.RemoveRuleFirewallPolicyRequest, dict]):
                The request object. A request message for
                FirewallPolicies.RemoveRule. See the
                method description for details.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation proto returned by the API. Unlike
                :meth:`remove_rule`, this ``_unary`` variant does not wrap
                the result in an ``ExtendedOperation``; callers must poll
                the returned operation themselves to track completion.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([firewall_policy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.RemoveRuleFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.RemoveRuleFirewallPolicyRequest):
            request = compute.RemoveRuleFirewallPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if firewall_policy is not None:
                request.firewall_policy = firewall_policy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.remove_rule]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_rule(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveRuleFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.RemoveRule. See the + method description for details. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveRuleFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.RemoveRuleFirewallPolicyRequest): + request = compute.RemoveRuleFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_organization_operations_client + operation_request = compute.GetGlobalOrganizationOperationRequest() + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyFirewallPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + global_organization_set_policy_request_resource: Optional[compute.GlobalOrganizationSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyFirewallPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.SetIamPolicy. See the + method description for details. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ global_organization_set_policy_request_resource (google.cloud.compute_v1.types.GlobalOrganizationSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_organization_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, global_organization_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyFirewallPolicyRequest): + request = compute.SetIamPolicyFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if resource is not None: + request.resource = resource + if global_organization_set_policy_request_resource is not None: + request.global_organization_set_policy_request_resource = global_organization_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsFirewallPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsFirewallPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsFirewallPolicyRequest, dict]): + The request object. A request message for + FirewallPolicies.TestIamPermissions. See + the method description for details. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsFirewallPolicyRequest): + request = compute.TestIamPermissionsFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "FirewallPoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "FirewallPoliciesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/pagers.py new file mode 100644 index 000000000..8309fb38f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.FirewallPolicyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.FirewallPolicyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.FirewallPolicyList], + request: compute.ListFirewallPoliciesRequest, + response: compute.FirewallPolicyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListFirewallPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.FirewallPolicyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListFirewallPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.FirewallPolicyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.FirewallPolicy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/__init__.py new file mode 100644 index 000000000..f5f34c233 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FirewallPoliciesTransport +from .rest import FirewallPoliciesRestTransport +from .rest import FirewallPoliciesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[FirewallPoliciesTransport]] +_transport_registry['rest'] = FirewallPoliciesRestTransport + +__all__ = ( + 'FirewallPoliciesTransport', + 'FirewallPoliciesRestTransport', + 'FirewallPoliciesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/base.py new file mode 100644 index 000000000..e3a96834f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/base.py @@ -0,0 +1,401 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_organization_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class FirewallPoliciesTransport(abc.ABC): + """Abstract transport class for FirewallPolicies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_association: gapic_v1.method.wrap_method( + self.add_association, + default_timeout=None, + client_info=client_info, + ), + self.add_rule: gapic_v1.method.wrap_method( + self.add_rule, + default_timeout=None, + client_info=client_info, + ), + self.clone_rules: gapic_v1.method.wrap_method( + self.clone_rules, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_association: gapic_v1.method.wrap_method( + self.get_association, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.get_rule: gapic_v1.method.wrap_method( + self.get_rule, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_associations: gapic_v1.method.wrap_method( + self.list_associations, + default_timeout=None, + client_info=client_info, + ), + self.move: gapic_v1.method.wrap_method( + self.move, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.patch_rule: gapic_v1.method.wrap_method( + self.patch_rule, + default_timeout=None, + client_info=client_info, + ), + self.remove_association: gapic_v1.method.wrap_method( + self.remove_association, + default_timeout=None, + 
client_info=client_info, + ), + self.remove_rule: gapic_v1.method.wrap_method( + self.remove_rule, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def add_association(self) -> Callable[ + [compute.AddAssociationFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def add_rule(self) -> Callable[ + [compute.AddRuleFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def clone_rules(self) -> Callable[ + [compute.CloneRulesFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetFirewallPolicyRequest], + Union[ + compute.FirewallPolicy, + Awaitable[compute.FirewallPolicy] + ]]: + raise NotImplementedError() + + @property + def get_association(self) -> Callable[ + [compute.GetAssociationFirewallPolicyRequest], + Union[ + compute.FirewallPolicyAssociation, + Awaitable[compute.FirewallPolicyAssociation] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyFirewallPolicyRequest], + Union[ + compute.Policy, + 
Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def get_rule(self) -> Callable[ + [compute.GetRuleFirewallPolicyRequest], + Union[ + compute.FirewallPolicyRule, + Awaitable[compute.FirewallPolicyRule] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListFirewallPoliciesRequest], + Union[ + compute.FirewallPolicyList, + Awaitable[compute.FirewallPolicyList] + ]]: + raise NotImplementedError() + + @property + def list_associations(self) -> Callable[ + [compute.ListAssociationsFirewallPolicyRequest], + Union[ + compute.FirewallPoliciesListAssociationsResponse, + Awaitable[compute.FirewallPoliciesListAssociationsResponse] + ]]: + raise NotImplementedError() + + @property + def move(self) -> Callable[ + [compute.MoveFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def patch_rule(self) -> Callable[ + [compute.PatchRuleFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def remove_association(self) -> Callable[ + [compute.RemoveAssociationFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def remove_rule(self) -> Callable[ + [compute.RemoveRuleFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyFirewallPolicyRequest], + Union[ + compute.Policy, 
+ Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsFirewallPolicyRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_organization_operations_client(self) -> global_organization_operations.GlobalOrganizationOperationsClient: + ex_op_service = self._extended_operations_services.get("global_organization_operations") + if not ex_op_service: + ex_op_service = global_organization_operations.GlobalOrganizationOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_organization_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'FirewallPoliciesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/rest.py new file mode 100644 index 000000000..06e33ce6a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewall_policies/transports/rest.py @@ -0,0 +1,2450 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import FirewallPoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FirewallPoliciesRestInterceptor: + """Interceptor for FirewallPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FirewallPoliciesRestTransport. + + .. 
code-block:: python + class MyCustomFirewallPoliciesInterceptor(FirewallPoliciesRestInterceptor): + def pre_add_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_add_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_clone_rules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_clone_rules(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_associations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_associations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_move(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_move(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FirewallPoliciesRestTransport(interceptor=MyCustomFirewallPoliciesInterceptor()) + client = FirewallPoliciesClient(transport=transport) + + + """ + def pre_add_association(self, request: compute.AddAssociationFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddAssociationFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_add_association(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_association + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_add_rule(self, request: compute.AddRuleFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddRuleFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_add_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_rule + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_clone_rules(self, request: compute.CloneRulesFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.CloneRulesFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for clone_rules + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_clone_rules(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for clone_rules + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_get(self, response: compute.FirewallPolicy) -> compute.FirewallPolicy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_get_association(self, request: compute.GetAssociationFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetAssociationFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_get_association(self, response: compute.FirewallPolicyAssociation) -> compute.FirewallPolicyAssociation: + """Post-rpc interceptor for get_association + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_get_rule(self, request: compute.GetRuleFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRuleFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. 
+ """ + return request, metadata + + def post_get_rule(self, response: compute.FirewallPolicyRule) -> compute.FirewallPolicyRule: + """Post-rpc interceptor for get_rule + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListFirewallPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListFirewallPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_list(self, response: compute.FirewallPolicyList) -> compute.FirewallPolicyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_list_associations(self, request: compute.ListAssociationsFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListAssociationsFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_associations + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_list_associations(self, response: compute.FirewallPoliciesListAssociationsResponse) -> compute.FirewallPoliciesListAssociationsResponse: + """Post-rpc interceptor for list_associations + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_move(self, request: compute.MoveFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.MoveFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for move + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_move(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for move + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. 
+ """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_patch_rule(self, request: compute.PatchRuleFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRuleFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_patch_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch_rule + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_remove_association(self, request: compute.RemoveAssociationFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveAssociationFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_remove_association(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_association + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_remove_rule(self, request: compute.RemoveRuleFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveRuleFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_remove_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_rule + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the FirewallPolicies server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the FirewallPolicies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FirewallPoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FirewallPoliciesRestInterceptor + + +class FirewallPoliciesRestTransport(FirewallPoliciesTransport): + """REST backend transport for FirewallPolicies. + + The FirewallPolicies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[FirewallPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FirewallPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddAssociation(FirewallPoliciesRestStub): + def __hash__(self): + return hash("AddAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddAssociationFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add association method over HTTP. + + Args: + request (~.compute.AddAssociationFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.AddAssociation. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addAssociation', + 'body': 'firewall_policy_association_resource', + }, + ] + request, metadata = self._interceptor.pre_add_association(request, metadata) + pb_request = compute.AddAssociationFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # 
subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_association(resp) + return resp + + class _AddRule(FirewallPoliciesRestStub): + def __hash__(self): + return hash("AddRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddRuleFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add rule method over HTTP. + + Args: + request (~.compute.AddRuleFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.AddRule. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addRule', + 'body': 'firewall_policy_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_add_rule(request, metadata) + pb_request = compute.AddRuleFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_rule(resp) + return resp + + class _CloneRules(FirewallPoliciesRestStub): + def __hash__(self): + return hash("CloneRules") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.CloneRulesFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the clone rules method over HTTP. + + Args: + request (~.compute.CloneRulesFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.CloneRules. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/cloneRules', + }, + ] + request, metadata = self._interceptor.pre_clone_rules(request, metadata) + pb_request = compute.CloneRulesFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_clone_rules(resp) + return resp + + class _Delete(FirewallPoliciesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(FirewallPoliciesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.FirewallPolicy: + Represents a Firewall Policy + resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicy() + pb_resp = compute.FirewallPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetAssociation(FirewallPoliciesRestStub): + def __hash__(self): + return hash("GetAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetAssociationFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicyAssociation: + r"""Call the get association method over HTTP. + + Args: + request (~.compute.GetAssociationFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.GetAssociation. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.FirewallPolicyAssociation: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getAssociation', + }, + ] + request, metadata = self._interceptor.pre_get_association(request, metadata) + pb_request = compute.GetAssociationFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicyAssociation() + pb_resp = compute.FirewallPolicyAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_association(resp) + return resp + + class _GetIamPolicy(FirewallPoliciesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/locations/global/firewallPolicies/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _GetRule(FirewallPoliciesRestStub): + def __hash__(self): + return hash("GetRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRuleFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicyRule: + r"""Call the get rule method over HTTP. + + Args: + request (~.compute.GetRuleFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.GetRule. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.FirewallPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getRule', + }, + ] + request, metadata = self._interceptor.pre_get_rule(request, metadata) + pb_request = compute.GetRuleFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicyRule() + pb_resp = compute.FirewallPolicyRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_rule(resp) + return resp + + class _Insert(FirewallPoliciesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "parentId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies', + 'body': 'firewall_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(FirewallPoliciesRestStub): + def __hash__(self): + return hash("List") + + def __call__(self, + request: compute.ListFirewallPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicyList: + r"""Call the list method over HTTP. 
+ + Args: + request (~.compute.ListFirewallPoliciesRequest): + The request object. A request message for + FirewallPolicies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.FirewallPolicyList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/locations/global/firewallPolicies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListFirewallPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicyList() + pb_resp = compute.FirewallPolicyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListAssociations(FirewallPoliciesRestStub): + def __hash__(self): + return hash("ListAssociations") + + def __call__(self, + request: compute.ListAssociationsFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPoliciesListAssociationsResponse: + r"""Call the list associations method over HTTP. + + Args: + request (~.compute.ListAssociationsFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.ListAssociations. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.FirewallPoliciesListAssociationsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/locations/global/firewallPolicies/listAssociations', + }, + ] + request, metadata = self._interceptor.pre_list_associations(request, metadata) + pb_request = compute.ListAssociationsFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPoliciesListAssociationsResponse() + pb_resp = compute.FirewallPoliciesListAssociationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_associations(resp) + return resp + + class _Move(FirewallPoliciesRestStub): + def __hash__(self): + return hash("Move") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "parentId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.MoveFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the move method over HTTP. + + Args: + request (~.compute.MoveFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.Move. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/move', + }, + ] + request, metadata = self._interceptor.pre_move(request, metadata) + pb_request = compute.MoveFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_move(resp) + return resp + + class _Patch(FirewallPoliciesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}', + 'body': 'firewall_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _PatchRule(FirewallPoliciesRestStub): + def __hash__(self): + return hash("PatchRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRuleFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch rule method over HTTP. + + Args: + request (~.compute.PatchRuleFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.PatchRule. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/patchRule', + 'body': 'firewall_policy_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_patch_rule(request, metadata) + pb_request = compute.PatchRuleFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch_rule(resp) + return resp + + class _RemoveAssociation(FirewallPoliciesRestStub): + def __hash__(self): + return hash("RemoveAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveAssociationFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove association method over HTTP. + + Args: + request (~.compute.RemoveAssociationFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.RemoveAssociation. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeAssociation', + }, + ] + request, metadata = self._interceptor.pre_remove_association(request, metadata) + pb_request = compute.RemoveAssociationFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_association(resp) + return resp + + class _RemoveRule(FirewallPoliciesRestStub): + def __hash__(self): + return hash("RemoveRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveRuleFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove rule method over HTTP. + + Args: + request (~.compute.RemoveRuleFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.RemoveRule. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeRule', + }, + ] + request, metadata = self._interceptor.pre_remove_rule(request, metadata) + pb_request = compute.RemoveRuleFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_rule(resp) + return resp + + class _SetIamPolicy(FirewallPoliciesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{resource}/setIamPolicy', + 'body': 'global_organization_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(FirewallPoliciesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsFirewallPolicyRequest): + The request object. A request message for + FirewallPolicies.TestIamPermissions. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/locations/global/firewallPolicies/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def add_association(self) -> Callable[ + [compute.AddAssociationFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def add_rule(self) -> Callable[ + [compute.AddRuleFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def clone_rules(self) -> Callable[ + [compute.CloneRulesFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CloneRules(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetFirewallPolicyRequest], + compute.FirewallPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_association(self) -> Callable[ + [compute.GetAssociationFirewallPolicyRequest], + compute.FirewallPolicyAssociation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyFirewallPolicyRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_rule(self) -> Callable[ + [compute.GetRuleFirewallPolicyRequest], + compute.FirewallPolicyRule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListFirewallPoliciesRequest], + compute.FirewallPolicyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_associations(self) -> Callable[ + [compute.ListAssociationsFirewallPolicyRequest], + compute.FirewallPoliciesListAssociationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAssociations(self._session, self._host, self._interceptor) # type: ignore + + @property + def move(self) -> Callable[ + [compute.MoveFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Move(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch_rule(self) -> Callable[ + [compute.PatchRuleFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PatchRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_association(self) -> Callable[ + [compute.RemoveAssociationFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RemoveAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_rule(self) -> Callable[ + [compute.RemoveRuleFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyFirewallPolicyRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsFirewallPolicyRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'FirewallPoliciesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/__init__.py new file mode 100644 index 000000000..015472b6c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import FirewallsClient + +__all__ = ( + 'FirewallsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/client.py new file mode 100644 index 000000000..7627b9850 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/client.py @@ -0,0 +1,1638 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.firewalls import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import FirewallsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import FirewallsRestTransport + + +class FirewallsClientMeta(type): + """Metaclass for the Firewalls client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[FirewallsTransport]] + _transport_registry["rest"] = FirewallsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[FirewallsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class FirewallsClient(metaclass=FirewallsClientMeta):
+    """The Firewalls API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            FirewallsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            FirewallsClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> FirewallsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            FirewallsTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, FirewallsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the firewalls client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, FirewallsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, FirewallsTransport): + # transport is a FirewallsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified firewall. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteFirewallRequest( + firewall="firewall_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteFirewallRequest, dict]): + The request object. 
A request message for + Firewalls.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall (str): + Name of the firewall rule to delete. + This corresponds to the ``firewall`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteFirewallRequest): + request = compute.DeleteFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall is not None: + request.firewall = firewall + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall", request.firewall), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified firewall. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteFirewallRequest( + firewall="firewall_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteFirewallRequest, dict]): + The request object. A request message for + Firewalls.Delete. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall (str): + Name of the firewall rule to delete. + This corresponds to the ``firewall`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteFirewallRequest): + request = compute.DeleteFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall is not None: + request.firewall = firewall + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall", request.firewall), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Firewall: + r"""Returns the specified firewall. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.GetFirewallRequest( + firewall="firewall_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetFirewallRequest, dict]): + The request object. A request message for Firewalls.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall (str): + Name of the firewall rule to return. + This corresponds to the ``firewall`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Firewall: + Represents a Firewall Rule resource. + Firewall rules allow or deny ingress + traffic to, and egress traffic from your + instances. For more information, read + Firewall rules. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetFirewallRequest): + request = compute.GetFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall is not None: + request.firewall = firewall + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall", request.firewall), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_resource: Optional[compute.Firewall] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a firewall rule in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.InsertFirewallRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertFirewallRequest, dict]): + The request object. A request message for + Firewalls.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + This corresponds to the ``firewall_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertFirewallRequest): + request = compute.InsertFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_resource is not None: + request.firewall_resource = firewall_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_resource: Optional[compute.Firewall] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a firewall rule in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.InsertFirewallRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertFirewallRequest, dict]): + The request object. A request message for + Firewalls.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + This corresponds to the ``firewall_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertFirewallRequest): + request = compute.InsertFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_resource is not None: + request.firewall_resource = firewall_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListFirewallsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of firewall rules available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.ListFirewallsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListFirewallsRequest, dict]): + The request object. A request message for Firewalls.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.firewalls.pagers.ListPager: + Contains a list of firewalls. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListFirewallsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListFirewallsRequest): + request = compute.ListFirewallsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall: Optional[str] = None, + firewall_resource: Optional[compute.Firewall] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified firewall rule with the data + included in the request. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.PatchFirewallRequest( + firewall="firewall_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchFirewallRequest, dict]): + The request object. A request message for + Firewalls.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall (str): + Name of the firewall rule to patch. 
+ This corresponds to the ``firewall`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + This corresponds to the ``firewall_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall, firewall_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchFirewallRequest): + request = compute.PatchFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall is not None: + request.firewall = firewall + if firewall_resource is not None: + request.firewall_resource = firewall_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall", request.firewall), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall: Optional[str] = None, + firewall_resource: Optional[compute.Firewall] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified firewall rule with the data + included in the request. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.PatchFirewallRequest( + firewall="firewall_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchFirewallRequest, dict]): + The request object. 
A request message for + Firewalls.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall (str): + Name of the firewall rule to patch. + This corresponds to the ``firewall`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + This corresponds to the ``firewall_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall, firewall_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchFirewallRequest): + request = compute.PatchFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if firewall is not None: + request.firewall = firewall + if firewall_resource is not None: + request.firewall_resource = firewall_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall", request.firewall), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def update_unary(self, + request: Optional[Union[compute.UpdateFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall: Optional[str] = None, + firewall_resource: Optional[compute.Firewall] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified firewall rule with the data + included in the request. Note that all fields will be + updated if using PUT, even fields that are not + specified. To update individual fields, please use PATCH + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateFirewallRequest( + firewall="firewall_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateFirewallRequest, dict]): + The request object. A request message for + Firewalls.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall (str): + Name of the firewall rule to update. 
+ This corresponds to the ``firewall`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + This corresponds to the ``firewall_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall, firewall_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateFirewallRequest): + request = compute.UpdateFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall is not None: + request.firewall = firewall + if firewall_resource is not None: + request.firewall_resource = firewall_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall", request.firewall), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateFirewallRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall: Optional[str] = None, + firewall_resource: Optional[compute.Firewall] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified firewall rule with the data + included in the request. Note that all fields will be + updated if using PUT, even fields that are not + specified. To update individual fields, please use PATCH + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.FirewallsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateFirewallRequest( + firewall="firewall_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateFirewallRequest, dict]): + The request object. A request message for + Firewalls.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall (str): + Name of the firewall rule to update. + This corresponds to the ``firewall`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + This corresponds to the ``firewall_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall, firewall_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateFirewallRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateFirewallRequest): + request = compute.UpdateFirewallRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall is not None: + request.firewall = firewall + if firewall_resource is not None: + request.firewall_resource = firewall_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall", request.firewall), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "FirewallsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "FirewallsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/pagers.py new file mode 100644 index 000000000..23ff64278 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.FirewallList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.FirewallList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.FirewallList], + request: compute.ListFirewallsRequest, + response: compute.FirewallList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListFirewallsRequest): + The initial request object. + response (google.cloud.compute_v1.types.FirewallList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListFirewallsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.FirewallList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Firewall]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/__init__.py new file mode 100644 index 000000000..17e9bc4ee --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FirewallsTransport +from .rest import FirewallsRestTransport +from .rest import FirewallsRestInterceptor + + +# Compile a registry of transports. 
# Maps the transport name (as passed via a client's ``transport=`` argument)
# to its concrete transport class.  The Compute API only ships a REST
# transport, so 'rest' is the sole entry.
_transport_registry = OrderedDict()  # type: Dict[str, Type[FirewallsTransport]]
_transport_registry['rest'] = FirewallsRestTransport

# Public API of this package: the abstract transport, its REST
# implementation, and the interceptor hook for customizing REST calls.
__all__ = (
    'FirewallsTransport',
    'FirewallsRestTransport',
    'FirewallsRestInterceptor',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/base.py
new file mode 100644
index 000000000..9203e34fd
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/base.py
@@ -0,0 +1,233 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class FirewallsTransport(abc.ABC): + """Abstract transport class for Firewalls.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteFirewallRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetFirewallRequest], + Union[ + compute.Firewall, + Awaitable[compute.Firewall] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertFirewallRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListFirewallsRequest], + Union[ + compute.FirewallList, + Awaitable[compute.FirewallList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchFirewallRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateFirewallRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'FirewallsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/rest.py new file mode 100644 index 000000000..619fe1e1f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/firewalls/transports/rest.py @@ -0,0 +1,935 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import FirewallsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FirewallsRestInterceptor: + """Interceptor for Firewalls. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FirewallsRestTransport. + + .. code-block:: python + class MyCustomFirewallsInterceptor(FirewallsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FirewallsRestTransport(interceptor=MyCustomFirewallsInterceptor()) + client = FirewallsClient(transport=transport) + + + """ + def pre_delete(self, request: 
compute.DeleteFirewallRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteFirewallRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firewalls server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Firewalls server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetFirewallRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetFirewallRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firewalls server. + """ + return request, metadata + + def post_get(self, response: compute.Firewall) -> compute.Firewall: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Firewalls server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertFirewallRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertFirewallRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firewalls server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Firewalls server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListFirewallsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListFirewallsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firewalls server. + """ + return request, metadata + + def post_list(self, response: compute.FirewallList) -> compute.FirewallList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Firewalls server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchFirewallRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchFirewallRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firewalls server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the Firewalls server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateFirewallRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateFirewallRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firewalls server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the Firewalls server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class FirewallsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FirewallsRestInterceptor + + +class FirewallsRestTransport(FirewallsTransport): + """REST backend transport for Firewalls. + + The Firewalls API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[FirewallsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or FirewallsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Delete(FirewallsRestStub):
+        def __hash__(self):
+            return hash("Delete")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.DeleteFirewallRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the delete method over HTTP.
+
+            Args:
+                request (~.compute.DeleteFirewallRequest):
+                    The request object. A request message for
+                    Firewalls.Delete. See the method
+                    description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource.
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/firewalls/{firewall}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteFirewallRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(FirewallsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetFirewallRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Firewall: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetFirewallRequest): + The request object. A request message for Firewalls.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Firewall: + Represents a Firewall Rule resource. + Firewall rules allow or deny ingress + traffic to, and egress traffic from your + instances. For more information, read + Firewall rules. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/firewalls/{firewall}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetFirewallRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Firewall() + pb_resp = compute.Firewall.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(FirewallsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertFirewallRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. 
+ + Args: + request (~.compute.InsertFirewallRequest): + The request object. A request message for + Firewalls.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/firewalls', + 'body': 'firewall_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertFirewallRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(FirewallsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListFirewallsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListFirewallsRequest): + The request object. A request message for Firewalls.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.FirewallList: + Contains a list of firewalls. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/firewalls', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListFirewallsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallList() + pb_resp = compute.FirewallList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(FirewallsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchFirewallRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. 
+ + Args: + request (~.compute.PatchFirewallRequest): + The request object. A request message for + Firewalls.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/firewalls/{firewall}', + 'body': 'firewall_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchFirewallRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _Update(FirewallsRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateFirewallRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateFirewallRequest): + The request object. A request message for + Firewalls.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/global/firewalls/{firewall}', + 'body': 'firewall_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateFirewallRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteFirewallRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetFirewallRequest], + compute.Firewall]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertFirewallRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListFirewallsRequest], + compute.FirewallList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchFirewallRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateFirewallRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'FirewallsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/__init__.py new file mode 100644 index 000000000..478872f9b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ForwardingRulesClient + +__all__ = ( + 'ForwardingRulesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/client.py new file mode 100644 index 000000000..4999ab0d4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/client.py @@ -0,0 +1,2181 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.forwarding_rules import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import ForwardingRulesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ForwardingRulesRestTransport + + +class ForwardingRulesClientMeta(type): + """Metaclass for the ForwardingRules client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[ForwardingRulesTransport]]
+    _transport_registry["rest"] = ForwardingRulesRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[ForwardingRulesTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ForwardingRulesClient(metaclass=ForwardingRulesClientMeta):
+    """The ForwardingRules API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ForwardingRulesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ForwardingRulesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ForwardingRulesTransport: + """Returns the transport used by the client instance. + + Returns: + ForwardingRulesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ForwardingRulesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the forwarding rules client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ForwardingRulesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ForwardingRulesTransport): + # transport is a ForwardingRulesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListForwardingRulesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of forwarding rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListForwardingRulesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListForwardingRulesRequest, dict]): + The request object. A request message for + ForwardingRules.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.forwarding_rules.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListForwardingRulesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListForwardingRulesRequest): + request = compute.AggregatedListForwardingRulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified ForwardingRule resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to delete. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, forwarding_rule]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteForwardingRuleRequest): + request = compute.DeleteForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified ForwardingRule resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to delete. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, forwarding_rule]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteForwardingRuleRequest): + request = compute.DeleteForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.ForwardingRule: + r"""Returns the specified ForwardingRule resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.GetForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to return. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.ForwardingRule: + Represents a Forwarding Rule resource. 
Forwarding rule + resources in Google Cloud can be either regional or + global in scope: \* + [Global](https://cloud.google.com/compute/docs/reference/rest/v1/globalForwardingRules) + \* + [Regional](https://cloud.google.com/compute/docs/reference/rest/v1/forwardingRules) + A forwarding rule and its corresponding IP address + represent the frontend configuration of a Google Cloud + Platform load balancer. Forwarding rules can also + reference target instances and Cloud VPN Classic + gateways (targetVpnGateway). For more information, read + Forwarding rule concepts and Using protocol forwarding. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, forwarding_rule]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetForwardingRuleRequest): + request = compute.GetForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule_resource: Optional[compute.ForwardingRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a ForwardingRule resource in the specified + project and region using the data included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.InsertForwardingRuleRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + This corresponds to the ``forwarding_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, forwarding_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertForwardingRuleRequest): + request = compute.InsertForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule_resource is not None: + request.forwarding_rule_resource = forwarding_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule_resource: Optional[compute.ForwardingRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a ForwardingRule resource in the specified + project and region using the data included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.InsertForwardingRuleRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + This corresponds to the ``forwarding_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, forwarding_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertForwardingRuleRequest): + request = compute.InsertForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule_resource is not None: + request.forwarding_rule_resource = forwarding_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListForwardingRulesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of ForwardingRule resources + available to the specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.ListForwardingRulesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListForwardingRulesRequest, dict]): + The request object. A request message for + ForwardingRules.List. See the method + description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.forwarding_rules.pagers.ListPager: + Contains a list of ForwardingRule + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListForwardingRulesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListForwardingRulesRequest): + request = compute.ListForwardingRulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule: Optional[str] = None, + forwarding_rule_resource: Optional[compute.ForwardingRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified forwarding rule with the data included in + the request. This method supports PATCH semantics and uses the + JSON merge patch format and processing rules. Currently, you can + only patch the network_tier field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.PatchForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to patch. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + This corresponds to the ``forwarding_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, forwarding_rule, forwarding_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchForwardingRuleRequest): + request = compute.PatchForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + if forwarding_rule_resource is not None: + request.forwarding_rule_resource = forwarding_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch(self, + request: Optional[Union[compute.PatchForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule: Optional[str] = None, + forwarding_rule_resource: Optional[compute.ForwardingRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified forwarding rule with the data included in + the request. This method supports PATCH semantics and uses the + JSON merge patch format and processing rules. Currently, you can + only patch the network_tier field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.PatchForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to patch. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + This corresponds to the ``forwarding_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, forwarding_rule, forwarding_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchForwardingRuleRequest): + request = compute.PatchForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + if forwarding_rule_resource is not None: + request.forwarding_rule_resource = forwarding_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on the specified resource. To learn + more about labels, read the Labeling Resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsForwardingRuleRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. 
+ This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsForwardingRuleRequest): + request = compute.SetLabelsForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on the specified resource. To learn + more about labels, read the Labeling Resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsForwardingRuleRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsForwardingRuleRequest): + request = compute.SetLabelsForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_target_unary(self, + request: Optional[Union[compute.SetTargetForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule: Optional[str] = None, + target_reference_resource: Optional[compute.TargetReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes target URL for forwarding rule. The new + target should be of the same type as the old target. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_target(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_target(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTargetForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.SetTarget. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + in which target is to be set. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + This corresponds to the ``target_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, forwarding_rule, target_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetTargetForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetTargetForwardingRuleRequest): + request = compute.SetTargetForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + if target_reference_resource is not None: + request.target_reference_resource = target_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_target] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_target(self, + request: Optional[Union[compute.SetTargetForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + forwarding_rule: Optional[str] = None, + target_reference_resource: Optional[compute.TargetReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes target URL for forwarding rule. The new + target should be of the same type as the old target. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_target(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_target(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTargetForwardingRuleRequest, dict]): + The request object. A request message for + ForwardingRules.SetTarget. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + in which target is to be set. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + This corresponds to the ``target_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, forwarding_rule, target_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetTargetForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetTargetForwardingRuleRequest): + request = compute.SetTargetForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + if target_reference_resource is not None: + request.target_reference_resource = target_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_target] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "ForwardingRulesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ForwardingRulesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/pagers.py new file mode 100644 index 000000000..fd36effb1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ForwardingRuleAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ForwardingRuleAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ForwardingRuleAggregatedList], + request: compute.AggregatedListForwardingRulesRequest, + response: compute.ForwardingRuleAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListForwardingRulesRequest): + The initial request object. + response (google.cloud.compute_v1.types.ForwardingRuleAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListForwardingRulesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ForwardingRuleAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.ForwardingRulesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.ForwardingRulesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ForwardingRuleList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ForwardingRuleList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ForwardingRuleList], + request: compute.ListForwardingRulesRequest, + response: compute.ForwardingRuleList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListForwardingRulesRequest): + The initial request object. + response (google.cloud.compute_v1.types.ForwardingRuleList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListForwardingRulesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ForwardingRuleList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.ForwardingRule]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/__init__.py new file mode 100644 index 000000000..5bd886236 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ForwardingRulesTransport +from .rest import ForwardingRulesRestTransport +from .rest import ForwardingRulesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ForwardingRulesTransport]] +_transport_registry['rest'] = ForwardingRulesRestTransport + +__all__ = ( + 'ForwardingRulesTransport', + 'ForwardingRulesRestTransport', + 'ForwardingRulesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/base.py new file mode 100644 index 000000000..abd7fea91 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/base.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ForwardingRulesTransport(abc.ABC): + """Abstract transport class for ForwardingRules.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + self.set_target: gapic_v1.method.wrap_method( + self.set_target, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListForwardingRulesRequest], + Union[ + compute.ForwardingRuleAggregatedList, + Awaitable[compute.ForwardingRuleAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetForwardingRuleRequest], + Union[ + compute.ForwardingRule, + Awaitable[compute.ForwardingRule] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListForwardingRulesRequest], + Union[ + compute.ForwardingRuleList, + Awaitable[compute.ForwardingRuleList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_target(self) -> Callable[ + [compute.SetTargetForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + 
self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'ForwardingRulesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py new file mode 100644 index 000000000..eff5c27d8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py @@ -0,0 +1,1190 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ForwardingRulesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ForwardingRulesRestInterceptor: + """Interceptor for ForwardingRules. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ForwardingRulesRestTransport. + + .. 
code-block:: python + class MyCustomForwardingRulesInterceptor(ForwardingRulesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_target(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_target(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ForwardingRulesRestTransport(interceptor=MyCustomForwardingRulesInterceptor()) + client = 
ForwardingRulesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListForwardingRulesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListForwardingRulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ForwardingRules server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.ForwardingRuleAggregatedList) -> compute.ForwardingRuleAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the ForwardingRules server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the ForwardingRules server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the ForwardingRules server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the ForwardingRules server. 
+ """ + return request, metadata + + def post_get(self, response: compute.ForwardingRule) -> compute.ForwardingRule: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the ForwardingRules server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the ForwardingRules server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the ForwardingRules server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListForwardingRulesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListForwardingRulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ForwardingRules server. + """ + return request, metadata + + def post_list(self, response: compute.ForwardingRuleList) -> compute.ForwardingRuleList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the ForwardingRules server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the ForwardingRules server. 
+ """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the ForwardingRules server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the ForwardingRules server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the ForwardingRules server but before + it is returned to user code. + """ + return response + def pre_set_target(self, request: compute.SetTargetForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetTargetForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_target + + Override in a subclass to manipulate the request or metadata + before they are sent to the ForwardingRules server. + """ + return request, metadata + + def post_set_target(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_target + + Override in a subclass to manipulate the response + after it is returned by the ForwardingRules server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ForwardingRulesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ForwardingRulesRestInterceptor + + +class ForwardingRulesRestTransport(ForwardingRulesTransport): + """REST backend transport for ForwardingRules. + + The ForwardingRules API. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ForwardingRulesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ForwardingRulesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(ForwardingRulesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def 
__call__(self, + request: compute.AggregatedListForwardingRulesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ForwardingRuleAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListForwardingRulesRequest): + The request object. A request message for + ForwardingRules.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ForwardingRuleAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/forwardingRules', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListForwardingRulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ForwardingRuleAggregatedList() + pb_resp = compute.ForwardingRuleAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(ForwardingRulesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteForwardingRuleRequest): + The request object. A request message for + ForwardingRules.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(ForwardingRulesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ForwardingRule: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetForwardingRuleRequest): + The request object. A request message for + ForwardingRules.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ForwardingRule: + Represents a Forwarding Rule resource. Forwarding rule + resources in Google Cloud can be either regional or + global in scope: \* + `Global `__ + \* + `Regional `__ + A forwarding rule and its corresponding IP address + represent the frontend configuration of a Google Cloud + Platform load balancer. Forwarding rules can also + reference target instances and Cloud VPN Classic + gateways (targetVpnGateway). For more information, read + Forwarding rule concepts and Using protocol forwarding. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ForwardingRule() + pb_resp = compute.ForwardingRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(ForwardingRulesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertForwardingRuleRequest): + The request object. A request message for + ForwardingRules.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/forwardingRules', + 'body': 'forwarding_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(ForwardingRulesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListForwardingRulesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ForwardingRuleList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListForwardingRulesRequest): + The request object. A request message for + ForwardingRules.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ForwardingRuleList: + Contains a list of ForwardingRule + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/forwardingRules', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListForwardingRulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ForwardingRuleList() + pb_resp = compute.ForwardingRuleList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(ForwardingRulesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchForwardingRuleRequest): + The request object. A request message for + ForwardingRules.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}', + 'body': 'forwarding_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetLabels(ForwardingRulesRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsForwardingRuleRequest): + The request object. A request message for + ForwardingRules.SetLabels. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/forwardingRules/{resource}/setLabels', + 'body': 'region_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _SetTarget(ForwardingRulesRestStub): + def __hash__(self): + return hash("SetTarget") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetTargetForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set target method over HTTP. + + Args: + request (~.compute.SetTargetForwardingRuleRequest): + The request object. A request message for + ForwardingRules.SetTarget. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}/setTarget', + 'body': 'target_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_target(request, metadata) + pb_request = compute.SetTargetForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_target(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListForwardingRulesRequest], + compute.ForwardingRuleAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetForwardingRuleRequest], + compute.ForwardingRule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListForwardingRulesRequest], + compute.ForwardingRuleList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_target(self) -> Callable[ + [compute.SetTargetForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetTarget(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ForwardingRulesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/__init__.py new file mode 100644 index 000000000..d2180a7b6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import GlobalAddressesClient + +__all__ = ( + 'GlobalAddressesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/client.py new file mode 100644 index 000000000..99998ae77 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/client.py @@ -0,0 +1,1645 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.global_addresses import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import GlobalAddressesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import GlobalAddressesRestTransport + + +class GlobalAddressesClientMeta(type): + """Metaclass for the GlobalAddresses client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[GlobalAddressesTransport]] + _transport_registry["rest"] = GlobalAddressesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[GlobalAddressesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GlobalAddressesClient(metaclass=GlobalAddressesClientMeta): + """The GlobalAddresses API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalAddressesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalAddressesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GlobalAddressesTransport: + """Returns the transport used by the client instance. + + Returns: + GlobalAddressesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified 
organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GlobalAddressesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the global addresses client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, GlobalAddressesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GlobalAddressesTransport): + # transport is a GlobalAddressesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + address: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified address resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalAddressRequest( + address="address_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to + delete. + + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, address]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteGlobalAddressRequest): + request = compute.DeleteGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if address is not None: + request.address = address + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + address: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified address resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalAddressRequest( + address="address_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to + delete. + + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, address]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteGlobalAddressRequest): + request = compute.DeleteGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if address is not None: + request.address = address + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + address: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Address: + r"""Returns the specified address resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalAddressRequest( + address="address_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ address (str): + Name of the address resource to + return. + + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Address: + Represents an IP Address resource. Google Compute Engine + has two IP Address resources: \* [Global (external and + internal)](\ https://cloud.google.com/compute/docs/reference/rest/v1/globalAddresses) + \* [Regional (external and + internal)](\ https://cloud.google.com/compute/docs/reference/rest/v1/addresses) + For more information, see Reserving a static external IP + address. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, address]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetGlobalAddressRequest): + request = compute.GetGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if address is not None: + request.address = address + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + address_resource: Optional[compute.Address] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an address resource in the specified project + by using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalAddressRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address_resource (google.cloud.compute_v1.types.Address): + The body resource for this request + This corresponds to the ``address_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, address_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertGlobalAddressRequest): + request = compute.InsertGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if address_resource is not None: + request.address_resource = address_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + address_resource: Optional[compute.Address] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an address resource in the specified project + by using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalAddressRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ address_resource (google.cloud.compute_v1.types.Address): + The body resource for this request + This corresponds to the ``address_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, address_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertGlobalAddressRequest): + request = compute.InsertGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if address_resource is not None: + request.address_resource = address_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListGlobalAddressesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of global addresses. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalAddressesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListGlobalAddressesRequest, dict]): + The request object. A request message for + GlobalAddresses.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.global_addresses.pagers.ListPager: + Contains a list of addresses. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListGlobalAddressesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListGlobalAddressesRequest): + request = compute.ListGlobalAddressesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def move_unary(self, + request: Optional[Union[compute.MoveGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + address: Optional[str] = None, + global_addresses_move_request_resource: Optional[compute.GlobalAddressesMoveRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Moves the specified address resource from one project + to another project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.MoveGlobalAddressRequest( + address="address_value", + project="project_value", + ) + + # Make the request + response = client.move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.Move. See the method + description for details. + project (str): + Source project ID which the Address + is moved from. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to move. + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_addresses_move_request_resource (google.cloud.compute_v1.types.GlobalAddressesMoveRequest): + The body resource for this request + This corresponds to the ``global_addresses_move_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, address, global_addresses_move_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.MoveGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.MoveGlobalAddressRequest): + request = compute.MoveGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if address is not None: + request.address = address + if global_addresses_move_request_resource is not None: + request.global_addresses_move_request_resource = global_addresses_move_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def move(self, + request: Optional[Union[compute.MoveGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + address: Optional[str] = None, + global_addresses_move_request_resource: Optional[compute.GlobalAddressesMoveRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Moves the specified address resource from one project + to another project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.MoveGlobalAddressRequest( + address="address_value", + project="project_value", + ) + + # Make the request + response = client.move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.Move. See the method + description for details. + project (str): + Source project ID which the Address + is moved from. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + address (str): + Name of the address resource to move. + This corresponds to the ``address`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ global_addresses_move_request_resource (google.cloud.compute_v1.types.GlobalAddressesMoveRequest): + The body resource for this request + This corresponds to the ``global_addresses_move_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, address, global_addresses_move_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.MoveGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.MoveGlobalAddressRequest): + request = compute.MoveGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if address is not None: + request.address = address + if global_addresses_move_request_resource is not None: + request.global_addresses_move_request_resource = global_addresses_move_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.move] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("address", request.address), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on a GlobalAddress. 
To learn more + about labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsGlobalAddressRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsGlobalAddressRequest): + request = compute.SetLabelsGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsGlobalAddressRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on a GlobalAddress. To learn more + about labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsGlobalAddressRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsGlobalAddressRequest, dict]): + The request object. A request message for + GlobalAddresses.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. 
+ + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsGlobalAddressRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsGlobalAddressRequest): + request = compute.SetLabelsGlobalAddressRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "GlobalAddressesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "GlobalAddressesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/pagers.py new file mode 100644 index 000000000..4862ec402 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.AddressList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.compute_v1.types.AddressList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.AddressList], + request: compute.ListGlobalAddressesRequest, + response: compute.AddressList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListGlobalAddressesRequest): + The initial request object. + response (google.cloud.compute_v1.types.AddressList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListGlobalAddressesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.AddressList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Address]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/__init__.py new file mode 100644 index 000000000..9918af698 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google 
LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GlobalAddressesTransport +from .rest import GlobalAddressesRestTransport +from .rest import GlobalAddressesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GlobalAddressesTransport]] +_transport_registry['rest'] = GlobalAddressesRestTransport + +__all__ = ( + 'GlobalAddressesTransport', + 'GlobalAddressesRestTransport', + 'GlobalAddressesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/base.py new file mode 100644 index 000000000..982668034 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class GlobalAddressesTransport(abc.ABC): + """Abstract transport class for GlobalAddresses.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.move: gapic_v1.method.wrap_method( + self.move, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalAddressRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetGlobalAddressRequest], + Union[ + compute.Address, + Awaitable[compute.Address] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertGlobalAddressRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListGlobalAddressesRequest], + Union[ + compute.AddressList, + Awaitable[compute.AddressList] + ]]: + raise NotImplementedError() + + @property + def move(self) -> Callable[ + [compute.MoveGlobalAddressRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsGlobalAddressRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'GlobalAddressesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/rest.py new file mode 100644 index 000000000..a85288c75 --- /dev/null +++ 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_addresses/transports/rest.py @@ -0,0 +1,939 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import GlobalAddressesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + 
+ +class GlobalAddressesRestInterceptor: + """Interceptor for GlobalAddresses. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GlobalAddressesRestTransport. + + .. code-block:: python + class MyCustomGlobalAddressesInterceptor(GlobalAddressesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_move(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_move(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GlobalAddressesRestTransport(interceptor=MyCustomGlobalAddressesInterceptor()) 
+ client = GlobalAddressesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteGlobalAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteGlobalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalAddresses server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the GlobalAddresses server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetGlobalAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetGlobalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalAddresses server. + """ + return request, metadata + + def post_get(self, response: compute.Address) -> compute.Address: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the GlobalAddresses server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertGlobalAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertGlobalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalAddresses server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the GlobalAddresses server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListGlobalAddressesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListGlobalAddressesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalAddresses server. + """ + return request, metadata + + def post_list(self, response: compute.AddressList) -> compute.AddressList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the GlobalAddresses server but before + it is returned to user code. + """ + return response + def pre_move(self, request: compute.MoveGlobalAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.MoveGlobalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for move + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalAddresses server. + """ + return request, metadata + + def post_move(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for move + + Override in a subclass to manipulate the response + after it is returned by the GlobalAddresses server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsGlobalAddressRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsGlobalAddressRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalAddresses server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the GlobalAddresses server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class GlobalAddressesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GlobalAddressesRestInterceptor + + +class GlobalAddressesRestTransport(GlobalAddressesTransport): + """REST backend transport for GlobalAddresses. + + The GlobalAddresses API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[GlobalAddressesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GlobalAddressesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(GlobalAddressesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteGlobalAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteGlobalAddressRequest): + The request object. A request message for + GlobalAddresses.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource.
Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/addresses/{address}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteGlobalAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(GlobalAddressesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetGlobalAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Address: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetGlobalAddressRequest): + The request object. A request message for + GlobalAddresses.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Address: + Represents an IP Address resource. Google Compute Engine + has two IP Address resources: \* `Global (external and + internal) </compute/docs/reference/rest/v1/globalAddresses>`__ + \* `Regional (external and + internal) </compute/docs/reference/rest/v1/addresses>`__ + For more information, see Reserving a static external IP + address.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/addresses/{address}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetGlobalAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Address() + pb_resp = compute.Address.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(GlobalAddressesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertGlobalAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. 
+ + Args: + request (~.compute.InsertGlobalAddressRequest): + The request object. A request message for + GlobalAddresses.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/addresses', + 'body': 'address_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertGlobalAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(GlobalAddressesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListGlobalAddressesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.AddressList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListGlobalAddressesRequest): + The request object. A request message for + GlobalAddresses.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.AddressList: + Contains a list of addresses. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/addresses', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListGlobalAddressesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.AddressList() + pb_resp = compute.AddressList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Move(GlobalAddressesRestStub): + def __hash__(self): + return hash("Move") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.MoveGlobalAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the move method over HTTP. 
+ + Args: + request (~.compute.MoveGlobalAddressRequest): + The request object. A request message for + GlobalAddresses.Move. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/addresses/{address}/move', + 'body': 'global_addresses_move_request_resource', + }, + ] + request, metadata = self._interceptor.pre_move(request, metadata) + pb_request = compute.MoveGlobalAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_move(resp) + return resp + + class _SetLabels(GlobalAddressesRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsGlobalAddressRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsGlobalAddressRequest): + The request object. A request message for + GlobalAddresses.SetLabels. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/addresses/{resource}/setLabels', + 'body': 'global_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsGlobalAddressRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalAddressRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetGlobalAddressRequest], + compute.Address]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertGlobalAddressRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListGlobalAddressesRequest], + compute.AddressList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def move(self) -> Callable[ + [compute.MoveGlobalAddressRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Move(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsGlobalAddressRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'GlobalAddressesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/__init__.py new file mode 100644 index 000000000..7d8365595 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import GlobalForwardingRulesClient + +__all__ = ( + 'GlobalForwardingRulesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/client.py new file mode 100644 index 000000000..ae22b373c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/client.py @@ -0,0 +1,1929 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.global_forwarding_rules import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import GlobalForwardingRulesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import GlobalForwardingRulesRestTransport + + +class GlobalForwardingRulesClientMeta(type): + """Metaclass for the GlobalForwardingRules client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[GlobalForwardingRulesTransport]] + _transport_registry["rest"] = GlobalForwardingRulesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[GlobalForwardingRulesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GlobalForwardingRulesClient(metaclass=GlobalForwardingRulesClientMeta): + """The GlobalForwardingRules API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalForwardingRulesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalForwardingRulesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GlobalForwardingRulesTransport: + """Returns the transport used by the client instance. + + Returns: + GlobalForwardingRulesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GlobalForwardingRulesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the global forwarding rules client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, GlobalForwardingRulesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GlobalForwardingRulesTransport): + # transport is a GlobalForwardingRulesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified GlobalForwardingRule resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to delete. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, forwarding_rule]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteGlobalForwardingRuleRequest): + request = compute.DeleteGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified GlobalForwardingRule resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to delete. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, forwarding_rule]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteGlobalForwardingRuleRequest): + request = compute.DeleteGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.ForwardingRule: + r"""Returns the specified GlobalForwardingRule resource. + Gets a list of available forwarding rules by making a + list() request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.Get. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to return. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.ForwardingRule: + Represents a Forwarding Rule resource. Forwarding rule + resources in Google Cloud can be either regional or + global in scope: \* + [Global](https://cloud.google.com/compute/docs/reference/rest/v1/globalForwardingRules) + \* + [Regional](https://cloud.google.com/compute/docs/reference/rest/v1/forwardingRules) + A forwarding rule and its corresponding IP address + represent the frontend configuration of a Google Cloud + Platform load balancer. Forwarding rules can also + reference target instances and Cloud VPN Classic + gateways (targetVpnGateway). For more information, read + Forwarding rule concepts and Using protocol forwarding. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, forwarding_rule]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetGlobalForwardingRuleRequest): + request = compute.GetGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule_resource: Optional[compute.ForwardingRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a GlobalForwardingRule resource in the + specified project using the data included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalForwardingRuleRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + This corresponds to the ``forwarding_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, forwarding_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertGlobalForwardingRuleRequest): + request = compute.InsertGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if forwarding_rule_resource is not None: + request.forwarding_rule_resource = forwarding_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule_resource: Optional[compute.ForwardingRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a GlobalForwardingRule resource in the + specified project using the data included in the + request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalForwardingRuleRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + This corresponds to the ``forwarding_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, forwarding_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertGlobalForwardingRuleRequest): + request = compute.InsertGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if forwarding_rule_resource is not None: + request.forwarding_rule_resource = forwarding_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListGlobalForwardingRulesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of GlobalForwardingRule resources + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalForwardingRulesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListGlobalForwardingRulesRequest, dict]): + The request object. A request message for + GlobalForwardingRules.List. See the + method description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.global_forwarding_rules.pagers.ListPager: + Contains a list of ForwardingRule + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListGlobalForwardingRulesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListGlobalForwardingRulesRequest): + request = compute.ListGlobalForwardingRulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule: Optional[str] = None, + forwarding_rule_resource: Optional[compute.ForwardingRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified forwarding rule with the data included in + the request. This method supports PATCH semantics and uses the + JSON merge patch format and processing rules. Currently, you can + only patch the network_tier field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.PatchGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchGlobalForwardingRuleRequest, dict]): + The request object. 
A request message for + GlobalForwardingRules.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to patch. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + This corresponds to the ``forwarding_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, forwarding_rule, forwarding_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.PatchGlobalForwardingRuleRequest): + request = compute.PatchGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + if forwarding_rule_resource is not None: + request.forwarding_rule_resource = forwarding_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule: Optional[str] = None, + forwarding_rule_resource: Optional[compute.ForwardingRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified forwarding rule with the data included in + the request. This method supports PATCH semantics and uses the + JSON merge patch format and processing rules. Currently, you can + only patch the network_tier field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.PatchGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + to patch. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + This corresponds to the ``forwarding_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, forwarding_rule, forwarding_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchGlobalForwardingRuleRequest): + request = compute.PatchGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + if forwarding_rule_resource is not None: + request.forwarding_rule_resource = forwarding_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on the specified resource. To learn + more about labels, read the Labeling resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsGlobalForwardingRuleRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsGlobalForwardingRuleRequest): + request = compute.SetLabelsGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on the specified resource. To learn + more about labels, read the Labeling resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsGlobalForwardingRuleRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. 
+ + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsGlobalForwardingRuleRequest): + request = compute.SetLabelsGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_target_unary(self, + request: Optional[Union[compute.SetTargetGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule: Optional[str] = None, + target_reference_resource: Optional[compute.TargetReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes target URL for the GlobalForwardingRule + resource. The new target should be of the same type as + the old target. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_target(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.set_target(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTargetGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.SetTarget. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + in which target is to be set. 
+ + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + This corresponds to the ``target_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, forwarding_rule, target_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetTargetGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetTargetGlobalForwardingRuleRequest): + request = compute.SetTargetGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + if target_reference_resource is not None: + request.target_reference_resource = target_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_target] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_target(self, + request: Optional[Union[compute.SetTargetGlobalForwardingRuleRequest, dict]] = None, + *, + project: Optional[str] = None, + forwarding_rule: Optional[str] = None, + target_reference_resource: Optional[compute.TargetReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes target URL for the GlobalForwardingRule + resource. The new target should be of the same type as + the old target. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_target(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.set_target(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTargetGlobalForwardingRuleRequest, dict]): + The request object. A request message for + GlobalForwardingRules.SetTarget. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + forwarding_rule (str): + Name of the ForwardingRule resource + in which target is to be set. + + This corresponds to the ``forwarding_rule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + This corresponds to the ``target_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, forwarding_rule, target_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetTargetGlobalForwardingRuleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetTargetGlobalForwardingRuleRequest): + request = compute.SetTargetGlobalForwardingRuleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if forwarding_rule is not None: + request.forwarding_rule = forwarding_rule + if target_reference_resource is not None: + request.target_reference_resource = target_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_target] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("forwarding_rule", request.forwarding_rule), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "GlobalForwardingRulesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "GlobalForwardingRulesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/pagers.py new file mode 100644 index 000000000..c049436cb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ForwardingRuleList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ForwardingRuleList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ForwardingRuleList], + request: compute.ListGlobalForwardingRulesRequest, + response: compute.ForwardingRuleList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListGlobalForwardingRulesRequest): + The initial request object. + response (google.cloud.compute_v1.types.ForwardingRuleList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListGlobalForwardingRulesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ForwardingRuleList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.ForwardingRule]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/__init__.py new file mode 100644 index 000000000..4ed307048 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GlobalForwardingRulesTransport +from .rest import GlobalForwardingRulesRestTransport +from .rest import GlobalForwardingRulesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GlobalForwardingRulesTransport]] +_transport_registry['rest'] = GlobalForwardingRulesRestTransport + +__all__ = ( + 'GlobalForwardingRulesTransport', + 'GlobalForwardingRulesRestTransport', + 'GlobalForwardingRulesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/base.py new file mode 100644 index 000000000..1e3728da4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class GlobalForwardingRulesTransport(abc.ABC): + """Abstract transport class for GlobalForwardingRules.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + self.set_target: gapic_v1.method.wrap_method( + self.set_target, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetGlobalForwardingRuleRequest], + Union[ + compute.ForwardingRule, + Awaitable[compute.ForwardingRule] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertGlobalForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListGlobalForwardingRulesRequest], + Union[ + compute.ForwardingRuleList, + Awaitable[compute.ForwardingRuleList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchGlobalForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsGlobalForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_target(self) -> Callable[ + [compute.SetTargetGlobalForwardingRuleRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'GlobalForwardingRulesTransport', +) diff --git 
a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py new file mode 100644 index 000000000..6ea5fdaf0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py @@ -0,0 +1,1080 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + 
+from .base import GlobalForwardingRulesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GlobalForwardingRulesRestInterceptor: + """Interceptor for GlobalForwardingRules. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GlobalForwardingRulesRestTransport. + + .. code-block:: python + class MyCustomGlobalForwardingRulesInterceptor(GlobalForwardingRulesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_target(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_target(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GlobalForwardingRulesRestTransport(interceptor=MyCustomGlobalForwardingRulesInterceptor()) + client = GlobalForwardingRulesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteGlobalForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteGlobalForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalForwardingRules server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the GlobalForwardingRules server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetGlobalForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetGlobalForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalForwardingRules server. + """ + return request, metadata + + def post_get(self, response: compute.ForwardingRule) -> compute.ForwardingRule: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the GlobalForwardingRules server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertGlobalForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertGlobalForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalForwardingRules server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the GlobalForwardingRules server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListGlobalForwardingRulesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListGlobalForwardingRulesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalForwardingRules server. + """ + return request, metadata + + def post_list(self, response: compute.ForwardingRuleList) -> compute.ForwardingRuleList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the GlobalForwardingRules server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchGlobalForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchGlobalForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalForwardingRules server. 
+ """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the GlobalForwardingRules server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsGlobalForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsGlobalForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalForwardingRules server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the GlobalForwardingRules server but before + it is returned to user code. + """ + return response + def pre_set_target(self, request: compute.SetTargetGlobalForwardingRuleRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetTargetGlobalForwardingRuleRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_target + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalForwardingRules server. + """ + return request, metadata + + def post_set_target(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_target + + Override in a subclass to manipulate the response + after it is returned by the GlobalForwardingRules server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class GlobalForwardingRulesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GlobalForwardingRulesRestInterceptor + + +class GlobalForwardingRulesRestTransport(GlobalForwardingRulesTransport): + """REST backend transport for GlobalForwardingRules. + + The GlobalForwardingRules API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[GlobalForwardingRulesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GlobalForwardingRulesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(GlobalForwardingRulesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteGlobalForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteGlobalForwardingRuleRequest): + The request object. A request message for + GlobalForwardingRules.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + `Global <https://cloud.google.com/compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional <https://cloud.google.com/compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal <https://cloud.google.com/compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteGlobalForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(GlobalForwardingRulesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetGlobalForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ForwardingRule: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetGlobalForwardingRuleRequest): + The request object. A request message for + GlobalForwardingRules.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ForwardingRule: + Represents a Forwarding Rule resource. Forwarding rule + resources in Google Cloud can be either regional or + global in scope: \* + `Global `__ + \* + `Regional `__ + A forwarding rule and its corresponding IP address + represent the frontend configuration of a Google Cloud + Platform load balancer. Forwarding rules can also + reference target instances and Cloud VPN Classic + gateways (targetVpnGateway). For more information, read + Forwarding rule concepts and Using protocol forwarding. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetGlobalForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ForwardingRule() + pb_resp = compute.ForwardingRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(GlobalForwardingRulesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertGlobalForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertGlobalForwardingRuleRequest): + The request object. A request message for + GlobalForwardingRules.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/forwardingRules', + 'body': 'forwarding_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertGlobalForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(GlobalForwardingRulesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListGlobalForwardingRulesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ForwardingRuleList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListGlobalForwardingRulesRequest): + The request object. A request message for + GlobalForwardingRules.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ForwardingRuleList: + Contains a list of ForwardingRule + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/forwardingRules', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListGlobalForwardingRulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ForwardingRuleList() + pb_resp = compute.ForwardingRuleList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(GlobalForwardingRulesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchGlobalForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchGlobalForwardingRuleRequest): + The request object. A request message for + GlobalForwardingRules.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}', + 'body': 'forwarding_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchGlobalForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetLabels(GlobalForwardingRulesRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsGlobalForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsGlobalForwardingRuleRequest): + The request object. A request message for + GlobalForwardingRules.SetLabels. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/forwardingRules/{resource}/setLabels', + 'body': 'global_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsGlobalForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _SetTarget(GlobalForwardingRulesRestStub): + def __hash__(self): + return hash("SetTarget") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetTargetGlobalForwardingRuleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set target method over HTTP. + + Args: + request (~.compute.SetTargetGlobalForwardingRuleRequest): + The request object. A request message for + GlobalForwardingRules.SetTarget. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}/setTarget', + 'body': 'target_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_target(request, metadata) + pb_request = compute.SetTargetGlobalForwardingRuleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_target(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetGlobalForwardingRuleRequest], + compute.ForwardingRule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertGlobalForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListGlobalForwardingRulesRequest], + compute.ForwardingRuleList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchGlobalForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsGlobalForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_target(self) -> Callable[ + [compute.SetTargetGlobalForwardingRuleRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetTarget(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'GlobalForwardingRulesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/__init__.py new file mode 100644 index 000000000..1da7459ef --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import GlobalNetworkEndpointGroupsClient + +__all__ = ( + 'GlobalNetworkEndpointGroupsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py new file mode 100644 index 000000000..377623870 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py @@ -0,0 +1,1790 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.global_network_endpoint_groups import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import GlobalNetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import GlobalNetworkEndpointGroupsRestTransport + + +class GlobalNetworkEndpointGroupsClientMeta(type): + """Metaclass for the GlobalNetworkEndpointGroups client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[GlobalNetworkEndpointGroupsTransport]] + _transport_registry["rest"] = GlobalNetworkEndpointGroupsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[GlobalNetworkEndpointGroupsTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GlobalNetworkEndpointGroupsClient(metaclass=GlobalNetworkEndpointGroupsClientMeta): + """The GlobalNetworkEndpointGroups API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalNetworkEndpointGroupsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalNetworkEndpointGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GlobalNetworkEndpointGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + GlobalNetworkEndpointGroupsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GlobalNetworkEndpointGroupsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the global network endpoint groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, GlobalNetworkEndpointGroupsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GlobalNetworkEndpointGroupsTransport): + # transport is a GlobalNetworkEndpointGroupsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def attach_network_endpoints_unary(self, + request: Optional[Union[compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + global_network_endpoint_groups_attach_endpoints_request_resource: Optional[compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Attach a network endpoint to the specified network + endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.AttachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are attaching network + endpoints to. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.GlobalNetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + This corresponds to the ``global_network_endpoint_groups_attach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group, global_network_endpoint_groups_attach_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + request = compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if global_network_endpoint_groups_attach_endpoints_request_resource is not None: + request.global_network_endpoint_groups_attach_endpoints_request_resource = global_network_endpoint_groups_attach_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.attach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def attach_network_endpoints(self, + request: Optional[Union[compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + global_network_endpoint_groups_attach_endpoints_request_resource: Optional[compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Attach a network endpoint to the specified network + endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.AttachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are attaching network + endpoints to. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.GlobalNetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + This corresponds to the ``global_network_endpoint_groups_attach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group, global_network_endpoint_groups_attach_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + request = compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if global_network_endpoint_groups_attach_endpoints_request_resource is not None: + request.global_network_endpoint_groups_attach_endpoints_request_resource = global_network_endpoint_groups_attach_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.attach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified network endpoint group.Note + that the NEG cannot be deleted if there are backend + services referencing it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group to delete. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteGlobalNetworkEndpointGroupRequest): + request = compute.DeleteGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified network endpoint group.Note + that the NEG cannot be deleted if there are backend + services referencing it. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group to delete. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteGlobalNetworkEndpointGroupRequest): + request = compute.DeleteGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def detach_network_endpoints_unary(self, + request: Optional[Union[compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + global_network_endpoint_groups_detach_endpoints_request_resource: Optional[compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Detach the network endpoint from the specified + network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_detach_network_endpoints(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.detach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.DetachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are removing network + endpoints. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.GlobalNetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + This corresponds to the ``global_network_endpoint_groups_detach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group, global_network_endpoint_groups_detach_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + request = compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if global_network_endpoint_groups_detach_endpoints_request_resource is not None: + request.global_network_endpoint_groups_detach_endpoints_request_resource = global_network_endpoint_groups_detach_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def detach_network_endpoints(self, + request: Optional[Union[compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + global_network_endpoint_groups_detach_endpoints_request_resource: Optional[compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Detach the network endpoint from the specified + network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_detach_network_endpoints(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.detach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.DetachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are removing network + endpoints. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.GlobalNetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + This corresponds to the ``global_network_endpoint_groups_detach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group, global_network_endpoint_groups_detach_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + request = compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if global_network_endpoint_groups_detach_endpoints_request_resource is not None: + request.global_network_endpoint_groups_detach_endpoints_request_resource = global_network_endpoint_groups_detach_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NetworkEndpointGroup: + r"""Returns the specified network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group. It should comply with RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NetworkEndpointGroup: + Represents a collection of network + endpoints. A network endpoint group + (NEG) defines how a set of endpoints + should be reached, whether they are + reachable, and where they are located. 
+ For more information about using NEGs, + see Setting up external HTTP(S) Load + Balancing with internet NEGs, Setting up + zonal NEGs, or Setting up external + HTTP(S) Load Balancing with serverless + NEGs. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetGlobalNetworkEndpointGroupRequest): + request = compute.GetGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group_resource: Optional[compute.NetworkEndpointGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a network endpoint group in the specified + project using the parameters that are included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalNetworkEndpointGroupRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.Insert. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup): + The body resource for this request + This corresponds to the ``network_endpoint_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertGlobalNetworkEndpointGroupRequest): + request = compute.InsertGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group_resource is not None: + request.network_endpoint_group_resource = network_endpoint_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertGlobalNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group_resource: Optional[compute.NetworkEndpointGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a network endpoint group in the specified + project using the parameters that are included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalNetworkEndpointGroupRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertGlobalNetworkEndpointGroupRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.Insert. See + the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup): + The body resource for this request + This corresponds to the ``network_endpoint_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertGlobalNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertGlobalNetworkEndpointGroupRequest): + request = compute.InsertGlobalNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group_resource is not None: + request.network_endpoint_group_resource = network_endpoint_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListGlobalNetworkEndpointGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of network endpoint groups that + are located in the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalNetworkEndpointGroupsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListGlobalNetworkEndpointGroupsRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.List. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.global_network_endpoint_groups.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListGlobalNetworkEndpointGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListGlobalNetworkEndpointGroupsRequest): + request = compute.ListGlobalNetworkEndpointGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_network_endpoints(self, + request: Optional[Union[compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworkEndpointsPager: + r"""Lists the network endpoints in the specified network + endpoint group. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_network_endpoints(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + page_result = client.list_network_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, dict]): + The request object. A request message for + GlobalNetworkEndpointGroups.ListNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group from which you want to generate a + list of included network endpoints. It + should comply with RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.services.global_network_endpoint_groups.pagers.ListNetworkEndpointsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest): + request = compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListNetworkEndpointsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GlobalNetworkEndpointGroupsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "GlobalNetworkEndpointGroupsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/pagers.py new file mode 100644 index 000000000..e11d84941 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/pagers.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkEndpointGroupList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkEndpointGroupList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NetworkEndpointGroupList], + request: compute.ListGlobalNetworkEndpointGroupsRequest, + response: compute.NetworkEndpointGroupList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListGlobalNetworkEndpointGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkEndpointGroupList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
        """
        self._method = method
        # Copy the request into a fresh protobuf message so that advancing
        # ``page_token`` below never mutates the caller's request object.
        self._request = compute.ListGlobalNetworkEndpointGroupsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes are proxied to the most recent response, so the
        # pager transparently exposes all NetworkEndpointGroupList attributes.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.NetworkEndpointGroupList]:
        # Lazily fetch pages: yield the initial response first, then keep
        # re-issuing the stored request with the server-supplied
        # ``next_page_token`` until the server stops returning one.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[compute.NetworkEndpointGroup]:
        # Flatten the per-page ``items`` lists into a single element stream.
        for page in self.pages:
            yield from page.items

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)


class ListNetworkEndpointsPager:
    """A pager for iterating through ``list_network_endpoints`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints` object, and
    provides an ``__iter__`` method to iterate through its
    ``items`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``ListNetworkEndpoints`` requests and continue to iterate
    through the ``items`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., compute.NetworkEndpointGroupsListNetworkEndpoints],
            request: compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest,
            response: compute.NetworkEndpointGroupsListNetworkEndpoints,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
+ request (google.cloud.compute_v1.types.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkEndpointGroupsListNetworkEndpoints]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NetworkEndpointWithHealthStatus]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/__init__.py new file mode 100644 index 000000000..64f34ec12 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GlobalNetworkEndpointGroupsTransport +from .rest import GlobalNetworkEndpointGroupsRestTransport +from .rest import GlobalNetworkEndpointGroupsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GlobalNetworkEndpointGroupsTransport]] +_transport_registry['rest'] = GlobalNetworkEndpointGroupsRestTransport + +__all__ = ( + 'GlobalNetworkEndpointGroupsTransport', + 'GlobalNetworkEndpointGroupsRestTransport', + 'GlobalNetworkEndpointGroupsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/base.py new file mode 100644 index 000000000..dbdf88d30 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class GlobalNetworkEndpointGroupsTransport(abc.ABC): + """Abstract transport class for GlobalNetworkEndpointGroups.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # NOTE: the dict is keyed by the RPC property objects themselves, so a
        # concrete transport looks up its wrapped callable via
        # ``self._wrapped_methods[self.<rpc>]``; ``wrap_method`` layers retry,
        # timeout and client-info metadata handling around each bare RPC.
        self._wrapped_methods = {
            self.attach_network_endpoints: gapic_v1.method.wrap_method(
                self.attach_network_endpoints,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete: gapic_v1.method.wrap_method(
                self.delete,
                default_timeout=None,
                client_info=client_info,
            ),
            self.detach_network_endpoints: gapic_v1.method.wrap_method(
                self.detach_network_endpoints,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.insert: gapic_v1.method.wrap_method(
                self.insert,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_network_endpoints: gapic_v1.method.wrap_method(
                self.list_network_endpoints,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
+ """ + raise NotImplementedError() + + @property + def attach_network_endpoints(self) -> Callable[ + [compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def detach_network_endpoints(self) -> Callable[ + [compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetGlobalNetworkEndpointGroupRequest], + Union[ + compute.NetworkEndpointGroup, + Awaitable[compute.NetworkEndpointGroup] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertGlobalNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListGlobalNetworkEndpointGroupsRequest], + Union[ + compute.NetworkEndpointGroupList, + Awaitable[compute.NetworkEndpointGroupList] + ]]: + raise NotImplementedError() + + @property + def list_network_endpoints(self) -> Callable[ + [compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest], + Union[ + compute.NetworkEndpointGroupsListNetworkEndpoints, + Awaitable[compute.NetworkEndpointGroupsListNetworkEndpoints] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + 
) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'GlobalNetworkEndpointGroupsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py new file mode 100644 index 000000000..6396356af --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py @@ -0,0 +1,1053 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import GlobalNetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GlobalNetworkEndpointGroupsRestInterceptor: + """Interceptor for GlobalNetworkEndpointGroups. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GlobalNetworkEndpointGroupsRestTransport. + + .. 
code-block:: python + class MyCustomGlobalNetworkEndpointGroupsInterceptor(GlobalNetworkEndpointGroupsRestInterceptor): + def pre_attach_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_attach_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_detach_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_detach_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GlobalNetworkEndpointGroupsRestTransport(interceptor=MyCustomGlobalNetworkEndpointGroupsInterceptor()) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + + + """ + def pre_attach_network_endpoints(self, 
request: compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for attach_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalNetworkEndpointGroups server. + """ + return request, metadata + + def post_attach_network_endpoints(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for attach_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the GlobalNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteGlobalNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteGlobalNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalNetworkEndpointGroups server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the GlobalNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_detach_network_endpoints(self, request: compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for detach_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalNetworkEndpointGroups server. 
+ """ + return request, metadata + + def post_detach_network_endpoints(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for detach_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the GlobalNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetGlobalNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetGlobalNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalNetworkEndpointGroups server. + """ + return request, metadata + + def post_get(self, response: compute.NetworkEndpointGroup) -> compute.NetworkEndpointGroup: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the GlobalNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertGlobalNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertGlobalNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalNetworkEndpointGroups server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the GlobalNetworkEndpointGroups server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListGlobalNetworkEndpointGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListGlobalNetworkEndpointGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalNetworkEndpointGroups server. + """ + return request, metadata + + def post_list(self, response: compute.NetworkEndpointGroupList) -> compute.NetworkEndpointGroupList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the GlobalNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_list_network_endpoints(self, request: compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalNetworkEndpointGroups server. + """ + return request, metadata + + def post_list_network_endpoints(self, response: compute.NetworkEndpointGroupsListNetworkEndpoints) -> compute.NetworkEndpointGroupsListNetworkEndpoints: + """Post-rpc interceptor for list_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the GlobalNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class GlobalNetworkEndpointGroupsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GlobalNetworkEndpointGroupsRestInterceptor + + +class GlobalNetworkEndpointGroupsRestTransport(GlobalNetworkEndpointGroupsTransport): + """REST backend transport for GlobalNetworkEndpointGroups. + + The GlobalNetworkEndpointGroups API. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[GlobalNetworkEndpointGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GlobalNetworkEndpointGroupsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AttachNetworkEndpoints(GlobalNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("AttachNetworkEndpoints") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() 
if k not in message_dict} + + def __call__(self, + request: compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the attach network endpoints method over HTTP. + + Args: + request (~.compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + The request object. A request message for + GlobalNetworkEndpointGroups.AttachNetworkEndpoints. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints', + 'body': 'global_network_endpoint_groups_attach_endpoints_request_resource', + }, + ] + request, metadata = self._interceptor.pre_attach_network_endpoints(request, metadata) + pb_request = compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_attach_network_endpoints(resp) + return resp + + class _Delete(GlobalNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteGlobalNetworkEndpointGroupRequest): + The request object. A request message for + GlobalNetworkEndpointGroups.Delete. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteGlobalNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _DetachNetworkEndpoints(GlobalNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("DetachNetworkEndpoints") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the detach network endpoints method over HTTP. + + Args: + request (~.compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + The request object. A request message for + GlobalNetworkEndpointGroups.DetachNetworkEndpoints. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints', + 'body': 'global_network_endpoint_groups_detach_endpoints_request_resource', + }, + ] + request, metadata = self._interceptor.pre_detach_network_endpoints(request, metadata) + pb_request = compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_detach_network_endpoints(resp) + return resp + + class _Get(GlobalNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEndpointGroup: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetGlobalNetworkEndpointGroupRequest): + The request object. A request message for + GlobalNetworkEndpointGroups.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NetworkEndpointGroup: + Represents a collection of network + endpoints. A network endpoint group + (NEG) defines how a set of endpoints + should be reached, whether they are + reachable, and where they are located. + For more information about using NEGs, + see Setting up external HTTP(S) Load + Balancing with internet NEGs, Setting up + zonal NEGs, or Setting up external + HTTP(S) Load Balancing with serverless + NEGs. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetGlobalNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEndpointGroup() + pb_resp = compute.NetworkEndpointGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(GlobalNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertGlobalNetworkEndpointGroupRequest): + The request object. A request message for + GlobalNetworkEndpointGroups.Insert. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/networkEndpointGroups', + 'body': 'network_endpoint_group_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertGlobalNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(GlobalNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListGlobalNetworkEndpointGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEndpointGroupList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListGlobalNetworkEndpointGroupsRequest): + The request object. A request message for + GlobalNetworkEndpointGroups.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NetworkEndpointGroupList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/networkEndpointGroups', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListGlobalNetworkEndpointGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEndpointGroupList() + pb_resp = compute.NetworkEndpointGroupList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListNetworkEndpoints(GlobalNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("ListNetworkEndpoints") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: + r"""Call the list network endpoints method over HTTP. + + Args: + request (~.compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest): + The request object. A request message for + GlobalNetworkEndpointGroups.ListNetworkEndpoints. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NetworkEndpointGroupsListNetworkEndpoints: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints', + }, + ] + request, metadata = self._interceptor.pre_list_network_endpoints(request, metadata) + pb_request = compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEndpointGroupsListNetworkEndpoints() + pb_resp = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_network_endpoints(resp) + return resp + + @property + def attach_network_endpoints(self) -> Callable[ + [compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AttachNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalNetworkEndpointGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def detach_network_endpoints(self) -> Callable[ + [compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DetachNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetGlobalNetworkEndpointGroupRequest], + compute.NetworkEndpointGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertGlobalNetworkEndpointGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListGlobalNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_network_endpoints(self) -> Callable[ + [compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupsListNetworkEndpoints]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'GlobalNetworkEndpointGroupsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/__init__.py new file mode 100644 index 000000000..3b391d96d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import GlobalOperationsClient + +__all__ = ( + 'GlobalOperationsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/client.py new file mode 100644 index 000000000..e050a7b47 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/client.py @@ -0,0 +1,990 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.global_operations import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import GlobalOperationsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import GlobalOperationsRestTransport + + +class GlobalOperationsClientMeta(type): + """Metaclass for the GlobalOperations client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[GlobalOperationsTransport]] + _transport_registry["rest"] = GlobalOperationsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[GlobalOperationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GlobalOperationsClient(metaclass=GlobalOperationsClientMeta): + """The GlobalOperations API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalOperationsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GlobalOperationsTransport: + """Returns the transport used by the client instance. + + Returns: + GlobalOperationsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GlobalOperationsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the global operations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, GlobalOperationsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GlobalOperationsTransport): + # transport is a GlobalOperationsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListGlobalOperationsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of all operations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListGlobalOperationsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListGlobalOperationsRequest, dict]): + The request object. A request message for + GlobalOperations.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.global_operations.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListGlobalOperationsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListGlobalOperationsRequest): + request = compute.AggregatedListGlobalOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteGlobalOperationRequest, dict]] = None, + *, + project: Optional[str] = None, + operation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.DeleteGlobalOperationResponse: + r"""Deletes the specified Operations resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalOperationRequest( + operation="operation_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteGlobalOperationRequest, dict]): + The request object. A request message for + GlobalOperations.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation (str): + Name of the Operations resource to + delete. + + This corresponds to the ``operation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.DeleteGlobalOperationResponse: + A response message for + GlobalOperations.Delete. See the method + description for details. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, operation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteGlobalOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteGlobalOperationRequest): + request = compute.DeleteGlobalOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if operation is not None: + request.operation = operation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("operation", request.operation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetGlobalOperationRequest, dict]] = None, + *, + project: Optional[str] = None, + operation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Retrieves the specified Operations resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalOperationRequest( + operation="operation_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetGlobalOperationRequest, dict]): + The request object. A request message for + GlobalOperations.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation (str): + Name of the Operations resource to + return. + + This corresponds to the ``operation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + [Global](/compute/docs/reference/rest/v1/globalOperations) + \* + [Regional](/compute/docs/reference/rest/v1/regionOperations) + \* + [Zonal](/compute/docs/reference/rest/v1/zoneOperations) + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the globalOperations + resource. 
- For regional operations, use the + regionOperations resource. - For zonal operations, use + the zonalOperations resource. For more information, read + Global, Regional, and Zonal Resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, operation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetGlobalOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetGlobalOperationRequest): + request = compute.GetGlobalOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if operation is not None: + request.operation = operation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("operation", request.operation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListGlobalOperationsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of Operation resources contained + within the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalOperationsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListGlobalOperationsRequest, dict]): + The request object. A request message for + GlobalOperations.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.global_operations.pagers.ListPager: + Contains a list of Operation + resources. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListGlobalOperationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListGlobalOperationsRequest): + request = compute.ListGlobalOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def wait(self, + request: Optional[Union[compute.WaitGlobalOperationRequest, dict]] = None, + *, + project: Optional[str] = None, + operation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Waits for the specified Operation resource to return as ``DONE`` + or for the request to approach the 2 minute deadline, and + retrieves the specified Operation resource. This method differs + from the ``GET`` method in that it waits for no more than the + default deadline (2 minutes) and then returns the current state + of the operation, which might be ``DONE`` or still in progress. + This method is called on a best-effort basis. Specifically: - In + uncommon cases, when the server is overloaded, the request might + return before the default deadline is reached, or might return + after zero seconds. - If the default deadline is reached, there + is no guarantee that the operation is actually done when the + method returns. Be prepared to retry if the operation is not + ``DONE``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_wait(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.WaitGlobalOperationRequest( + operation="operation_value", + project="project_value", + ) + + # Make the request + response = client.wait(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.WaitGlobalOperationRequest, dict]): + The request object. A request message for + GlobalOperations.Wait. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation (str): + Name of the Operations resource to + return. + + This corresponds to the ``operation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + [Global](/compute/docs/reference/rest/v1/globalOperations) + \* + [Regional](/compute/docs/reference/rest/v1/regionOperations) + \* + [Zonal](/compute/docs/reference/rest/v1/zoneOperations) + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the globalOperations + resource. 
- For regional operations, use the + regionOperations resource. - For zonal operations, use + the zonalOperations resource. For more information, read + Global, Regional, and Zonal Resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, operation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.WaitGlobalOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.WaitGlobalOperationRequest): + request = compute.WaitGlobalOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if operation is not None: + request.operation = operation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.wait] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("operation", request.operation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GlobalOperationsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "GlobalOperationsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/pagers.py new file mode 100644 index 000000000..218fed661 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.OperationAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.OperationAggregatedList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.OperationAggregatedList], + request: compute.AggregatedListGlobalOperationsRequest, + response: compute.OperationAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListGlobalOperationsRequest): + The initial request object. + response (google.cloud.compute_v1.types.OperationAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListGlobalOperationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.OperationAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.OperationsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.OperationsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.OperationList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.OperationList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.OperationList], + request: compute.ListGlobalOperationsRequest, + response: compute.OperationList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListGlobalOperationsRequest): + The initial request object. + response (google.cloud.compute_v1.types.OperationList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListGlobalOperationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.OperationList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Operation]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/__init__.py new file mode 100644 index 000000000..5d7d6d044 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GlobalOperationsTransport +from .rest import GlobalOperationsRestTransport +from .rest import GlobalOperationsRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[GlobalOperationsTransport]] +_transport_registry['rest'] = GlobalOperationsRestTransport + +__all__ = ( + 'GlobalOperationsTransport', + 'GlobalOperationsRestTransport', + 'GlobalOperationsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/base.py new file mode 100644 index 000000000..6a16961f6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/base.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class GlobalOperationsTransport(abc.ABC): + """Abstract transport class for GlobalOperations.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.wait: gapic_v1.method.wrap_method( + self.wait, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListGlobalOperationsRequest], + Union[ + compute.OperationAggregatedList, + Awaitable[compute.OperationAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalOperationRequest], + Union[ + compute.DeleteGlobalOperationResponse, + Awaitable[compute.DeleteGlobalOperationResponse] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetGlobalOperationRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListGlobalOperationsRequest], + Union[ + compute.OperationList, + Awaitable[compute.OperationList] + ]]: + raise NotImplementedError() + + @property + def wait(self) -> Callable[ + [compute.WaitGlobalOperationRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'GlobalOperationsTransport', +) diff --git 
a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/rest.py new file mode 100644 index 000000000..5d0e44e91 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_operations/transports/rest.py @@ -0,0 +1,770 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import 
GlobalOperationsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GlobalOperationsRestInterceptor: + """Interceptor for GlobalOperations. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GlobalOperationsRestTransport. + + .. code-block:: python + class MyCustomGlobalOperationsInterceptor(GlobalOperationsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_wait(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_wait(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
GlobalOperationsRestTransport(interceptor=MyCustomGlobalOperationsInterceptor()) + client = GlobalOperationsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListGlobalOperationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListGlobalOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalOperations server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.OperationAggregatedList) -> compute.OperationAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the GlobalOperations server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteGlobalOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteGlobalOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalOperations server. + """ + return request, metadata + + def post_delete(self, response: compute.DeleteGlobalOperationResponse) -> compute.DeleteGlobalOperationResponse: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the GlobalOperations server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetGlobalOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetGlobalOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalOperations server. 
+ """ + return request, metadata + + def post_get(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the GlobalOperations server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListGlobalOperationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListGlobalOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalOperations server. + """ + return request, metadata + + def post_list(self, response: compute.OperationList) -> compute.OperationList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the GlobalOperations server but before + it is returned to user code. + """ + return response + def pre_wait(self, request: compute.WaitGlobalOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.WaitGlobalOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for wait + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalOperations server. + """ + return request, metadata + + def post_wait(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for wait + + Override in a subclass to manipulate the response + after it is returned by the GlobalOperations server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class GlobalOperationsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GlobalOperationsRestInterceptor + + +class GlobalOperationsRestTransport(GlobalOperationsTransport): + """REST backend transport for GlobalOperations. + + The GlobalOperations API. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[GlobalOperationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GlobalOperationsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(GlobalOperationsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def 
__call__(self, + request: compute.AggregatedListGlobalOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.OperationAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListGlobalOperationsRequest): + The request object. A request message for + GlobalOperations.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.OperationAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/operations', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListGlobalOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.OperationAggregatedList() + pb_resp = compute.OperationAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(GlobalOperationsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteGlobalOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DeleteGlobalOperationResponse: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteGlobalOperationRequest): + The request object. A request message for + GlobalOperations.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.DeleteGlobalOperationResponse: + A response message for + GlobalOperations.Delete. See the method + description for details. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/operations/{operation}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteGlobalOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.DeleteGlobalOperationResponse() + pb_resp = compute.DeleteGlobalOperationResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(GlobalOperationsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetGlobalOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetGlobalOperationRequest): + The request object. A request message for + GlobalOperations.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/operations/{operation}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetGlobalOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(GlobalOperationsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListGlobalOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.OperationList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListGlobalOperationsRequest): + The request object. A request message for + GlobalOperations.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.OperationList: + Contains a list of Operation + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/operations', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListGlobalOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.OperationList() + pb_resp = compute.OperationList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Wait(GlobalOperationsRestStub): + def __hash__(self): + return hash("Wait") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.WaitGlobalOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the wait method over HTTP. 
+ + Args: + request (~.compute.WaitGlobalOperationRequest): + The request object. A request message for + GlobalOperations.Wait. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/operations/{operation}/wait', + }, + ] + request, metadata = self._interceptor.pre_wait(request, metadata) + pb_request = compute.WaitGlobalOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_wait(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListGlobalOperationsRequest], + compute.OperationAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalOperationRequest], + compute.DeleteGlobalOperationResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetGlobalOperationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListGlobalOperationsRequest], + compute.OperationList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def wait(self) -> Callable[ + [compute.WaitGlobalOperationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Wait(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'GlobalOperationsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/__init__.py new file mode 100644 index 000000000..8d4af1d22 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import GlobalOrganizationOperationsClient + +__all__ = ( + 'GlobalOrganizationOperationsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/client.py new file mode 100644 index 000000000..0ebeb7729 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/client.py @@ -0,0 +1,697 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.global_organization_operations import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import GlobalOrganizationOperationsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import GlobalOrganizationOperationsRestTransport + + +class GlobalOrganizationOperationsClientMeta(type): + """Metaclass for the GlobalOrganizationOperations client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[GlobalOrganizationOperationsTransport]] + _transport_registry["rest"] = GlobalOrganizationOperationsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[GlobalOrganizationOperationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GlobalOrganizationOperationsClient(metaclass=GlobalOrganizationOperationsClientMeta): + """The GlobalOrganizationOperations API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalOrganizationOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalOrganizationOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GlobalOrganizationOperationsTransport: + """Returns the transport used by the client instance. + + Returns: + GlobalOrganizationOperationsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GlobalOrganizationOperationsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the global organization operations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, GlobalOrganizationOperationsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GlobalOrganizationOperationsTransport): + # transport is a GlobalOrganizationOperationsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete(self, + request: Optional[Union[compute.DeleteGlobalOrganizationOperationRequest, dict]] = None, + *, + operation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.DeleteGlobalOrganizationOperationResponse: + r"""Deletes the specified Operations resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.GlobalOrganizationOperationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalOrganizationOperationRequest( + operation="operation_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteGlobalOrganizationOperationRequest, dict]): + The request object. A request message for + GlobalOrganizationOperations.Delete. See + the method description for details. + operation (str): + Name of the Operations resource to + delete. + + This corresponds to the ``operation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.DeleteGlobalOrganizationOperationResponse: + A response message for + GlobalOrganizationOperations.Delete. See + the method description for details. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([operation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteGlobalOrganizationOperationRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteGlobalOrganizationOperationRequest): + request = compute.DeleteGlobalOrganizationOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if operation is not None: + request.operation = operation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("operation", request.operation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetGlobalOrganizationOperationRequest, dict]] = None, + *, + operation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Retrieves the specified Operations resource. Gets a list of + operations by making a ``list()`` request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.GlobalOrganizationOperationsClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalOrganizationOperationRequest( + operation="operation_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetGlobalOrganizationOperationRequest, dict]): + The request object. A request message for + GlobalOrganizationOperations.Get. See + the method description for details. + operation (str): + Name of the Operations resource to + return. + + This corresponds to the ``operation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + [Global](/compute/docs/reference/rest/v1/globalOperations) + \* + [Regional](/compute/docs/reference/rest/v1/regionOperations) + \* + [Zonal](/compute/docs/reference/rest/v1/zoneOperations) + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the globalOperations + resource. - For regional operations, use the + regionOperations resource. - For zonal operations, use + the zonalOperations resource. 
For more information, read + Global, Regional, and Zonal Resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([operation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetGlobalOrganizationOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetGlobalOrganizationOperationRequest): + request = compute.GetGlobalOrganizationOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if operation is not None: + request.operation = operation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("operation", request.operation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListGlobalOrganizationOperationsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of Operation resources contained + within the specified organization. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.GlobalOrganizationOperationsClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalOrganizationOperationsRequest( + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListGlobalOrganizationOperationsRequest, dict]): + The request object. A request message for + GlobalOrganizationOperations.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.global_organization_operations.pagers.ListPager: + Contains a list of Operation + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListGlobalOrganizationOperationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.ListGlobalOrganizationOperationsRequest): + request = compute.ListGlobalOrganizationOperationsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GlobalOrganizationOperationsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "GlobalOrganizationOperationsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/pagers.py new file mode 100644 index 000000000..21fa91a9c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.OperationList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.OperationList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.OperationList], + request: compute.ListGlobalOrganizationOperationsRequest, + response: compute.OperationList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListGlobalOrganizationOperationsRequest): + The initial request object. + response (google.cloud.compute_v1.types.OperationList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListGlobalOrganizationOperationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.OperationList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Operation]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/__init__.py new file mode 100644 index 000000000..e22a8bb21 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GlobalOrganizationOperationsTransport +from .rest import GlobalOrganizationOperationsRestTransport +from .rest import GlobalOrganizationOperationsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GlobalOrganizationOperationsTransport]] +_transport_registry['rest'] = GlobalOrganizationOperationsRestTransport + +__all__ = ( + 'GlobalOrganizationOperationsTransport', + 'GlobalOrganizationOperationsRestTransport', + 'GlobalOrganizationOperationsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/base.py new file mode 100644 index 000000000..966f8b15e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/base.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class GlobalOrganizationOperationsTransport(abc.ABC): + """Abstract transport class for GlobalOrganizationOperations.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalOrganizationOperationRequest], + Union[ + compute.DeleteGlobalOrganizationOperationResponse, + Awaitable[compute.DeleteGlobalOrganizationOperationResponse] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetGlobalOrganizationOperationRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListGlobalOrganizationOperationsRequest], + Union[ + compute.OperationList, + Awaitable[compute.OperationList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'GlobalOrganizationOperationsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py new file mode 100644 index 000000000..d2d6cd22c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py @@ -0,0 +1,527 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import GlobalOrganizationOperationsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GlobalOrganizationOperationsRestInterceptor: + """Interceptor for GlobalOrganizationOperations. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GlobalOrganizationOperationsRestTransport. + + .. code-block:: python + class MyCustomGlobalOrganizationOperationsInterceptor(GlobalOrganizationOperationsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GlobalOrganizationOperationsRestTransport(interceptor=MyCustomGlobalOrganizationOperationsInterceptor()) + client = GlobalOrganizationOperationsClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteGlobalOrganizationOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteGlobalOrganizationOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalOrganizationOperations server. 
+ """ + return request, metadata + + def post_delete(self, response: compute.DeleteGlobalOrganizationOperationResponse) -> compute.DeleteGlobalOrganizationOperationResponse: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the GlobalOrganizationOperations server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetGlobalOrganizationOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetGlobalOrganizationOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalOrganizationOperations server. + """ + return request, metadata + + def post_get(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the GlobalOrganizationOperations server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListGlobalOrganizationOperationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListGlobalOrganizationOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalOrganizationOperations server. + """ + return request, metadata + + def post_list(self, response: compute.OperationList) -> compute.OperationList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the GlobalOrganizationOperations server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class GlobalOrganizationOperationsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GlobalOrganizationOperationsRestInterceptor + + +class GlobalOrganizationOperationsRestTransport(GlobalOrganizationOperationsTransport): + """REST backend transport for GlobalOrganizationOperations. + + The GlobalOrganizationOperations API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[GlobalOrganizationOperationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GlobalOrganizationOperationsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(GlobalOrganizationOperationsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteGlobalOrganizationOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DeleteGlobalOrganizationOperationResponse: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteGlobalOrganizationOperationRequest): + The request object. A request message for + GlobalOrganizationOperations.Delete. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.DeleteGlobalOrganizationOperationResponse: + A response message for + GlobalOrganizationOperations.Delete. See + the method description for details. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/locations/global/operations/{operation}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteGlobalOrganizationOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.DeleteGlobalOrganizationOperationResponse() + pb_resp = compute.DeleteGlobalOrganizationOperationResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(GlobalOrganizationOperationsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetGlobalOrganizationOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetGlobalOrganizationOperationRequest): + The request object. A request message for + GlobalOrganizationOperations.Get. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/locations/global/operations/{operation}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetGlobalOrganizationOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(GlobalOrganizationOperationsRestStub): + def __hash__(self): + return hash("List") + + def __call__(self, + request: compute.ListGlobalOrganizationOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.OperationList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListGlobalOrganizationOperationsRequest): + The request object. 
A request message for + GlobalOrganizationOperations.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.OperationList: + Contains a list of Operation + resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/locations/global/operations', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListGlobalOrganizationOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.OperationList() + pb_resp = compute.OperationList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalOrganizationOperationRequest], + compute.DeleteGlobalOrganizationOperationResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetGlobalOrganizationOperationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListGlobalOrganizationOperationsRequest], + compute.OperationList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'GlobalOrganizationOperationsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/__init__.py new file mode 100644 index 000000000..f6d35179f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import GlobalPublicDelegatedPrefixesClient + +__all__ = ( + 'GlobalPublicDelegatedPrefixesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py new file mode 100644 index 000000000..7d51881ba --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py @@ -0,0 +1,1387 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.global_public_delegated_prefixes import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import GlobalPublicDelegatedPrefixesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import GlobalPublicDelegatedPrefixesRestTransport + + +class GlobalPublicDelegatedPrefixesClientMeta(type): + """Metaclass for the GlobalPublicDelegatedPrefixes client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[GlobalPublicDelegatedPrefixesTransport]] + _transport_registry["rest"] = GlobalPublicDelegatedPrefixesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[GlobalPublicDelegatedPrefixesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GlobalPublicDelegatedPrefixesClient(metaclass=GlobalPublicDelegatedPrefixesClientMeta): + """The GlobalPublicDelegatedPrefixes API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalPublicDelegatedPrefixesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GlobalPublicDelegatedPrefixesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GlobalPublicDelegatedPrefixesTransport: + """Returns the transport used by the client instance. + + Returns: + GlobalPublicDelegatedPrefixesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GlobalPublicDelegatedPrefixesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the global public delegated prefixes client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, GlobalPublicDelegatedPrefixesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GlobalPublicDelegatedPrefixesTransport): + # transport is a GlobalPublicDelegatedPrefixesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
    def delete_unary(self,
            request: Optional[Union[compute.DeleteGlobalPublicDelegatedPrefixeRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            public_delegated_prefix: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Deletes the specified global PublicDelegatedPrefix.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.GlobalPublicDelegatedPrefixesClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteGlobalPublicDelegatedPrefixeRequest(
                    project="project_value",
                    public_delegated_prefix="public_delegated_prefix_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteGlobalPublicDelegatedPrefixeRequest, dict]):
                The request object. A request message for
                GlobalPublicDelegatedPrefixes.Delete.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            public_delegated_prefix (str):
                Name of the PublicDelegatedPrefix
                resource to delete.

                This corresponds to the ``public_delegated_prefix`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation proto as returned by the API.
                Unlike :meth:`delete`, the result is not wrapped in an
                ``ExtendedOperation`` and is not polled.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, public_delegated_prefix])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteGlobalPublicDelegatedPrefixeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteGlobalPublicDelegatedPrefixeRequest):
            request = compute.DeleteGlobalPublicDelegatedPrefixeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if public_delegated_prefix is not None:
                request.public_delegated_prefix = public_delegated_prefix

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here (routing headers let the backend route the request
        # to the right resource).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("public_delegated_prefix", request.public_delegated_prefix),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def delete(self,
            request: Optional[Union[compute.DeleteGlobalPublicDelegatedPrefixeRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            public_delegated_prefix: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified global PublicDelegatedPrefix.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.GlobalPublicDelegatedPrefixesClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteGlobalPublicDelegatedPrefixeRequest(
                    project="project_value",
                    public_delegated_prefix="public_delegated_prefix_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteGlobalPublicDelegatedPrefixeRequest, dict]):
                The request object. A request message for
                GlobalPublicDelegatedPrefixes.Delete.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            public_delegated_prefix (str):
                Name of the PublicDelegatedPrefix
                resource to delete.

                This corresponds to the ``public_delegated_prefix`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, public_delegated_prefix])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteGlobalPublicDelegatedPrefixeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteGlobalPublicDelegatedPrefixeRequest):
            request = compute.DeleteGlobalPublicDelegatedPrefixeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if public_delegated_prefix is not None:
                request.public_delegated_prefix = public_delegated_prefix

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("public_delegated_prefix", request.public_delegated_prefix),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a poller for the returned operation so callers can wait for
        # completion via the ExtendedOperation interface.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def get(self,
            request: Optional[Union[compute.GetGlobalPublicDelegatedPrefixeRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            public_delegated_prefix: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.PublicDelegatedPrefix:
        r"""Returns the specified global PublicDelegatedPrefix
        resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.GlobalPublicDelegatedPrefixesClient()

                # Initialize request argument(s)
                request = compute_v1.GetGlobalPublicDelegatedPrefixeRequest(
                    project="project_value",
                    public_delegated_prefix="public_delegated_prefix_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetGlobalPublicDelegatedPrefixeRequest, dict]):
                The request object. A request message for
                GlobalPublicDelegatedPrefixes.Get. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            public_delegated_prefix (str):
                Name of the PublicDelegatedPrefix
                resource to return.

                This corresponds to the ``public_delegated_prefix`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.PublicDelegatedPrefix:
                A PublicDelegatedPrefix resource
                represents an IP block within a
                PublicAdvertisedPrefix that is
                configured within a single cloud scope
                (global or region). IPs in the block can
                be allocated to resources within that
                scope. Public delegated prefixes may be
                further broken up into smaller IP blocks
                in the same scope as the parent block.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, public_delegated_prefix])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetGlobalPublicDelegatedPrefixeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetGlobalPublicDelegatedPrefixeRequest):
            request = compute.GetGlobalPublicDelegatedPrefixeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if public_delegated_prefix is not None:
                request.public_delegated_prefix = public_delegated_prefix

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("public_delegated_prefix", request.public_delegated_prefix),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def insert_unary(self,
            request: Optional[Union[compute.InsertGlobalPublicDelegatedPrefixeRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            public_delegated_prefix_resource: Optional[compute.PublicDelegatedPrefix] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates a global PublicDelegatedPrefix in the
        specified project using the parameters that are included
        in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.GlobalPublicDelegatedPrefixesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertGlobalPublicDelegatedPrefixeRequest(
                    project="project_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertGlobalPublicDelegatedPrefixeRequest, dict]):
                The request object. A request message for
                GlobalPublicDelegatedPrefixes.Insert.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix):
                The body resource for this request
                This corresponds to the ``public_delegated_prefix_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation proto as returned by the API.
                Unlike :meth:`insert`, the result is not wrapped in an
                ``ExtendedOperation`` and is not polled.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, public_delegated_prefix_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertGlobalPublicDelegatedPrefixeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertGlobalPublicDelegatedPrefixeRequest):
            request = compute.InsertGlobalPublicDelegatedPrefixeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if public_delegated_prefix_resource is not None:
                request.public_delegated_prefix_resource = public_delegated_prefix_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def insert(self,
            request: Optional[Union[compute.InsertGlobalPublicDelegatedPrefixeRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            public_delegated_prefix_resource: Optional[compute.PublicDelegatedPrefix] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Creates a global PublicDelegatedPrefix in the
        specified project using the parameters that are included
        in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.GlobalPublicDelegatedPrefixesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertGlobalPublicDelegatedPrefixeRequest(
                    project="project_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertGlobalPublicDelegatedPrefixeRequest, dict]):
                The request object. A request message for
                GlobalPublicDelegatedPrefixes.Insert.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix):
                The body resource for this request
                This corresponds to the ``public_delegated_prefix_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, public_delegated_prefix_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertGlobalPublicDelegatedPrefixeRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertGlobalPublicDelegatedPrefixeRequest):
            request = compute.InsertGlobalPublicDelegatedPrefixeRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if public_delegated_prefix_resource is not None:
                request.public_delegated_prefix_resource = public_delegated_prefix_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a poller for the returned operation so callers can wait for
        # completion via the ExtendedOperation interface.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.GlobalPublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalPublicDelegatedPrefixesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListGlobalPublicDelegatedPrefixesRequest, dict]): + The request object. A request message for + GlobalPublicDelegatedPrefixes.List. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.global_public_delegated_prefixes.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListGlobalPublicDelegatedPrefixesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListGlobalPublicDelegatedPrefixesRequest): + request = compute.ListGlobalPublicDelegatedPrefixesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchGlobalPublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + public_delegated_prefix_resource: Optional[compute.PublicDelegatedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified global PublicDelegatedPrefix + resource with the data included in the request. This + method supports PATCH semantics and uses JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.GlobalPublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchGlobalPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchGlobalPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + GlobalPublicDelegatedPrefixes.Patch. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ public_delegated_prefix (str): + Name of the PublicDelegatedPrefix + resource to patch. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + This corresponds to the ``public_delegated_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_delegated_prefix, public_delegated_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchGlobalPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchGlobalPublicDelegatedPrefixeRequest): + request = compute.PatchGlobalPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + if public_delegated_prefix_resource is not None: + request.public_delegated_prefix_resource = public_delegated_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("public_delegated_prefix", request.public_delegated_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchGlobalPublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + public_delegated_prefix_resource: Optional[compute.PublicDelegatedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified global PublicDelegatedPrefix + resource with the data included in the request. This + method supports PATCH semantics and uses JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.GlobalPublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchGlobalPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchGlobalPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + GlobalPublicDelegatedPrefixes.Patch. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix + resource to patch. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + This corresponds to the ``public_delegated_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_delegated_prefix, public_delegated_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchGlobalPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchGlobalPublicDelegatedPrefixeRequest): + request = compute.PatchGlobalPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + if public_delegated_prefix_resource is not None: + request.public_delegated_prefix_resource = public_delegated_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("public_delegated_prefix", request.public_delegated_prefix), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "GlobalPublicDelegatedPrefixesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "GlobalPublicDelegatedPrefixesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/pagers.py new file mode 100644 index 000000000..26c17ab88 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.PublicDelegatedPrefixList], + request: compute.ListGlobalPublicDelegatedPrefixesRequest, + response: compute.PublicDelegatedPrefixList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListGlobalPublicDelegatedPrefixesRequest): + The initial request object. + response (google.cloud.compute_v1.types.PublicDelegatedPrefixList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListGlobalPublicDelegatedPrefixesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.PublicDelegatedPrefixList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.PublicDelegatedPrefix]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/__init__.py new file mode 100644 index 000000000..57fc67754 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/__init__.py @@ -0,0 +1,32 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GlobalPublicDelegatedPrefixesTransport +from .rest import GlobalPublicDelegatedPrefixesRestTransport +from .rest import GlobalPublicDelegatedPrefixesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GlobalPublicDelegatedPrefixesTransport]] +_transport_registry['rest'] = GlobalPublicDelegatedPrefixesRestTransport + +__all__ = ( + 'GlobalPublicDelegatedPrefixesTransport', + 'GlobalPublicDelegatedPrefixesRestTransport', + 'GlobalPublicDelegatedPrefixesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/base.py new file mode 100644 index 000000000..20c15052a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class GlobalPublicDelegatedPrefixesTransport(abc.ABC): + """Abstract transport class for GlobalPublicDelegatedPrefixes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalPublicDelegatedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetGlobalPublicDelegatedPrefixeRequest], + Union[ + compute.PublicDelegatedPrefix, + Awaitable[compute.PublicDelegatedPrefix] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertGlobalPublicDelegatedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListGlobalPublicDelegatedPrefixesRequest], + Union[ + compute.PublicDelegatedPrefixList, + Awaitable[compute.PublicDelegatedPrefixList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchGlobalPublicDelegatedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'GlobalPublicDelegatedPrefixesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py new file mode 100644 index 000000000..194e8b4ae --- /dev/null +++ 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py @@ -0,0 +1,807 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import GlobalPublicDelegatedPrefixesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + 
rest_version=requests_version, +) + + +class GlobalPublicDelegatedPrefixesRestInterceptor: + """Interceptor for GlobalPublicDelegatedPrefixes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GlobalPublicDelegatedPrefixesRestTransport. + + .. code-block:: python + class MyCustomGlobalPublicDelegatedPrefixesInterceptor(GlobalPublicDelegatedPrefixesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GlobalPublicDelegatedPrefixesRestTransport(interceptor=MyCustomGlobalPublicDelegatedPrefixesInterceptor()) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + + + """ + def 
pre_delete(self, request: compute.DeleteGlobalPublicDelegatedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteGlobalPublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalPublicDelegatedPrefixes server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the GlobalPublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetGlobalPublicDelegatedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetGlobalPublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalPublicDelegatedPrefixes server. + """ + return request, metadata + + def post_get(self, response: compute.PublicDelegatedPrefix) -> compute.PublicDelegatedPrefix: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the GlobalPublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertGlobalPublicDelegatedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertGlobalPublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalPublicDelegatedPrefixes server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the GlobalPublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListGlobalPublicDelegatedPrefixesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListGlobalPublicDelegatedPrefixesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalPublicDelegatedPrefixes server. + """ + return request, metadata + + def post_list(self, response: compute.PublicDelegatedPrefixList) -> compute.PublicDelegatedPrefixList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the GlobalPublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchGlobalPublicDelegatedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchGlobalPublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the GlobalPublicDelegatedPrefixes server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the GlobalPublicDelegatedPrefixes server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class GlobalPublicDelegatedPrefixesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GlobalPublicDelegatedPrefixesRestInterceptor + + +class GlobalPublicDelegatedPrefixesRestTransport(GlobalPublicDelegatedPrefixesTransport): + """REST backend transport for GlobalPublicDelegatedPrefixes. + + The GlobalPublicDelegatedPrefixes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[GlobalPublicDelegatedPrefixesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or GlobalPublicDelegatedPrefixesRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Delete(GlobalPublicDelegatedPrefixesRestStub):
+        def __hash__(self):
+            return hash("Delete")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.DeleteGlobalPublicDelegatedPrefixeRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the delete method over HTTP.
+
+            Args:
+                request (~.compute.DeleteGlobalPublicDelegatedPrefixeRequest):
+                    The request object. A request message for
+                    GlobalPublicDelegatedPrefixes.Delete.
+                    See the method description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteGlobalPublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(GlobalPublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetGlobalPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PublicDelegatedPrefix: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetGlobalPublicDelegatedPrefixeRequest): + The request object. A request message for + GlobalPublicDelegatedPrefixes.Get. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.PublicDelegatedPrefix: + A PublicDelegatedPrefix resource + represents an IP block within a + PublicAdvertisedPrefix that is + configured within a single cloud scope + (global or region). IPs in the block can + be allocated to resources within that + scope. Public delegated prefixes may be + further broken up into smaller IP blocks + in the same scope as the parent block. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetGlobalPublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PublicDelegatedPrefix() + pb_resp = compute.PublicDelegatedPrefix.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(GlobalPublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertGlobalPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertGlobalPublicDelegatedPrefixeRequest): + The request object. A request message for + GlobalPublicDelegatedPrefixes.Insert. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/publicDelegatedPrefixes', + 'body': 'public_delegated_prefix_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertGlobalPublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(GlobalPublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListGlobalPublicDelegatedPrefixesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PublicDelegatedPrefixList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListGlobalPublicDelegatedPrefixesRequest): + The request object. A request message for + GlobalPublicDelegatedPrefixes.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.PublicDelegatedPrefixList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/publicDelegatedPrefixes', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListGlobalPublicDelegatedPrefixesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PublicDelegatedPrefixList() + pb_resp = compute.PublicDelegatedPrefixList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(GlobalPublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchGlobalPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchGlobalPublicDelegatedPrefixeRequest): + The request object. A request message for + GlobalPublicDelegatedPrefixes.Patch. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}', + 'body': 'public_delegated_prefix_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchGlobalPublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteGlobalPublicDelegatedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetGlobalPublicDelegatedPrefixeRequest], + compute.PublicDelegatedPrefix]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertGlobalPublicDelegatedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListGlobalPublicDelegatedPrefixesRequest], + compute.PublicDelegatedPrefixList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchGlobalPublicDelegatedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'GlobalPublicDelegatedPrefixesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/__init__.py new file mode 100644 index 000000000..921bbb67e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import HealthChecksClient + +__all__ = ( + 'HealthChecksClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/client.py new file mode 100644 index 000000000..a3a7180ef --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/client.py @@ -0,0 +1,1776 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.health_checks import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import HealthChecksTransport, DEFAULT_CLIENT_INFO +from .transports.rest import HealthChecksRestTransport + + +class HealthChecksClientMeta(type): + """Metaclass for the HealthChecks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[HealthChecksTransport]]
+    _transport_registry["rest"] = HealthChecksRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[HealthChecksTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class HealthChecksClient(metaclass=HealthChecksClientMeta):
+    """The HealthChecks API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HealthChecksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + HealthChecksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> HealthChecksTransport: + """Returns the transport used by the client instance. + + Returns: + HealthChecksTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, HealthChecksTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the health checks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, HealthChecksTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, HealthChecksTransport): + # transport is a HealthChecksTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListHealthChecksRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all HealthCheck resources, + regional and global, available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListHealthChecksRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListHealthChecksRequest, dict]): + The request object. A request message for + HealthChecks.AggregatedList. See the + method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.health_checks.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListHealthChecksRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListHealthChecksRequest): + request = compute.AggregatedListHealthChecksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified HealthCheck resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteHealthCheckRequest, dict]): + The request object. A request message for + HealthChecks.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + delete. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, health_check]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteHealthCheckRequest): + request = compute.DeleteHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if health_check is not None: + request.health_check = health_check + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified HealthCheck resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteHealthCheckRequest, dict]): + The request object. A request message for + HealthChecks.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + delete. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, health_check]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteHealthCheckRequest): + request = compute.DeleteHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if health_check is not None: + request.health_check = health_check + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.HealthCheck: + r"""Returns the specified HealthCheck resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetHealthCheckRequest, dict]): + The request object. A request message for + HealthChecks.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ health_check (str): + Name of the HealthCheck resource to + return. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.HealthCheck: + Represents a Health Check resource. Google Compute + Engine has two Health Check resources: \* + [Global](/compute/docs/reference/rest/v1/healthChecks) + \* + [Regional](/compute/docs/reference/rest/v1/regionHealthChecks) + Internal HTTP(S) load balancers must use regional health + checks (compute.v1.regionHealthChecks). Traffic Director + must use global health checks (compute.v1.healthChecks). + Internal TCP/UDP load balancers can use either regional + or global health checks (compute.v1.regionHealthChecks + or compute.v1.healthChecks). External HTTP(S), TCP + proxy, and SSL proxy load balancers as well as managed + instance group auto-healing must use global health + checks (compute.v1.healthChecks). Backend service-based + network load balancers must use regional health checks + (compute.v1.regionHealthChecks). Target pool-based + network load balancers must use legacy HTTP health + checks (compute.v1.httpHealthChecks). For more + information, see Health checks overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, health_check]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetHealthCheckRequest): + request = compute.GetHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if health_check is not None: + request.health_check = health_check + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a HealthCheck resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.InsertHealthCheckRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertHealthCheckRequest, dict]): + The request object. A request message for + HealthChecks.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertHealthCheckRequest): + request = compute.InsertHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a HealthCheck resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.InsertHealthCheckRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertHealthCheckRequest, dict]): + The request object. A request message for + HealthChecks.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertHealthCheckRequest): + request = compute.InsertHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListHealthChecksRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of HealthCheck resources available + to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.ListHealthChecksRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListHealthChecksRequest, dict]): + The request object. A request message for + HealthChecks.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.health_checks.pagers.ListPager: + Contains a list of HealthCheck + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListHealthChecksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListHealthChecksRequest): + request = compute.ListHealthChecksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates a HealthCheck resource in the specified + project using the data included in the request. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.PatchHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchHealthCheckRequest, dict]): + The request object. A request message for + HealthChecks.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            health_check (str):
+                Name of the HealthCheck resource to
+                patch.
+
+                This corresponds to the ``health_check`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            health_check_resource (google.cloud.compute_v1.types.HealthCheck):
+                The body resource for this request
+                This corresponds to the ``health_check_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw Operation proto for this request. Unlike
+                :meth:`patch`, this method does not wrap the result in an
+                extended long-running-operation helper.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, health_check, health_check_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.PatchHealthCheckRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.PatchHealthCheckRequest):
+            request = compute.PatchHealthCheckRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if project is not None: + request.project = project + if health_check is not None: + request.health_check = health_check + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates a HealthCheck resource in the specified + project using the data included in the request. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.PatchHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchHealthCheckRequest, dict]): + The request object. A request message for + HealthChecks.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + patch. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, health_check, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchHealthCheckRequest): + request = compute.PatchHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if health_check is not None: + request.health_check = health_check + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates a HealthCheck resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateHealthCheckRequest, dict]): + The request object. 
A request message for
+                HealthChecks.Update. See the method
+                description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            health_check (str):
+                Name of the HealthCheck resource to
+                update.
+
+                This corresponds to the ``health_check`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            health_check_resource (google.cloud.compute_v1.types.HealthCheck):
+                The body resource for this request
+                This corresponds to the ``health_check_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw Operation proto for this request. Unlike
+                :meth:`update`, this method does not wrap the result in an
+                extended long-running-operation helper.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, health_check, health_check_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.UpdateHealthCheckRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.UpdateHealthCheckRequest):
+            request = compute.UpdateHealthCheckRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if project is not None: + request.project = project + if health_check is not None: + request.health_check = health_check + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + health_check: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates a HealthCheck resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateHealthCheckRequest, dict]): + The request object. A request message for + HealthChecks.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + update. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, health_check, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateHealthCheckRequest): + request = compute.UpdateHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if health_check is not None: + request.health_check = health_check + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "HealthChecksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "HealthChecksClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/pagers.py new file mode 100644 index 000000000..dc36d043e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.HealthChecksAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.HealthChecksAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.HealthChecksAggregatedList], + request: compute.AggregatedListHealthChecksRequest, + response: compute.HealthChecksAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListHealthChecksRequest): + The initial request object. + response (google.cloud.compute_v1.types.HealthChecksAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListHealthChecksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.HealthChecksAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.HealthChecksScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.HealthChecksScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.HealthCheckList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.HealthCheckList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.HealthCheckList], + request: compute.ListHealthChecksRequest, + response: compute.HealthCheckList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListHealthChecksRequest): + The initial request object. 
+ response (google.cloud.compute_v1.types.HealthCheckList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListHealthChecksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.HealthCheckList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.HealthCheck]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/transports/__init__.py new file mode 100644 index 000000000..2d82b9f90 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import HealthChecksTransport +from .rest import HealthChecksRestTransport +from .rest import HealthChecksRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[HealthChecksTransport]] +_transport_registry['rest'] = HealthChecksRestTransport + +__all__ = ( + 'HealthChecksTransport', + 'HealthChecksRestTransport', + 'HealthChecksRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/transports/base.py new file mode 100644 index 000000000..9c6792ed8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/health_checks/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import abc
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore

from google.cloud.compute_v1.types import compute
from google.cloud.compute_v1.services import global_operations

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class HealthChecksTransport(abc.ABC):
    """Abstract transport class for HealthChecks.

    Concrete transports (e.g. REST) subclass this; the base class only
    resolves credentials and wraps the per-RPC callables exposed by the
    abstract properties below.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Audience applied to GDC-H
                credentials (falls back to ``host`` when unset).
        """
        # Lazily-built cache of extended-operation service clients; see
        # the `_global_operations_client` property.
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        self._wrapped_methods = {
            self.aggregated_list: gapic_v1.method.wrap_method(
                self.aggregated_list,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete: gapic_v1.method.wrap_method(
                self.delete,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.insert: gapic_v1.method.wrap_method(
                self.insert,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
            self.patch: gapic_v1.method.wrap_method(
                self.patch,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update: gapic_v1.method.wrap_method(
                self.update,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListHealthChecksRequest],
            Union[
                compute.HealthChecksAggregatedList,
                Awaitable[compute.HealthChecksAggregatedList]
            ]]:
        raise NotImplementedError()

    @property
    def delete(self) -> Callable[
            [compute.DeleteHealthCheckRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetHealthCheckRequest],
            Union[
                compute.HealthCheck,
                Awaitable[compute.HealthCheck]
            ]]:
        raise NotImplementedError()

    @property
    def insert(self) -> Callable[
            [compute.InsertHealthCheckRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListHealthChecksRequest],
            Union[
                compute.HealthCheckList,
                Awaitable[compute.HealthCheckList]
            ]]:
        raise NotImplementedError()

    @property
    def patch(self) -> Callable[
            [compute.PatchHealthCheckRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def update(self) -> Callable[
            [compute.UpdateHealthCheckRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()

    @property
    def _global_operations_client(self) -> global_operations.GlobalOperationsClient:
        # Build (once) and cache a GlobalOperations client that shares this
        # transport's credentials and kind; used to poll extended operations.
        ex_op_service = self._extended_operations_services.get("global_operations")
        if not ex_op_service:
            ex_op_service = global_operations.GlobalOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["global_operations"] = ex_op_service

        return ex_op_service


__all__ = (
    'HealthChecksTransport',
)
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from google.auth.transport.requests import AuthorizedSession  # type: ignore
import json  # type: ignore
import grpc  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
from google.api_core import path_template
from google.api_core import gapic_v1

from google.protobuf import json_format
from requests import __version__ as requests_version
import dataclasses
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


from google.cloud.compute_v1.types import compute

from .base import HealthChecksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)


class HealthChecksRestInterceptor:
    """Interceptor for HealthChecks.

    An interceptor lets callers observe or rewrite requests, request
    metadata, and responses as they pass through the REST transport.
    Typical uses include logging, validating requests against service
    or custom semantics, and stripping extraneous information from
    responses before they reach user code.

    Every RPC gets a ``pre_<rpc>`` hook (runs before the request is
    sent; must return the ``(request, metadata)`` pair) and a
    ``post_<rpc>`` hook (runs after the server replies; must return the
    response). Install a custom subclass when constructing the
    transport:

    .. code-block:: python
        class MyCustomHealthChecksInterceptor(HealthChecksRestInterceptor):
            def pre_get(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_get(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = HealthChecksRestTransport(interceptor=MyCustomHealthChecksInterceptor())
        client = HealthChecksClient(transport=transport)
    """

    def pre_aggregated_list(self, request: compute.AggregatedListHealthChecksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListHealthChecksRequest, Sequence[Tuple[str, str]]]:
        """Rewrite the request/metadata for aggregated_list before sending.

        Override in a subclass; must return the (request, metadata) pair.
        """
        return request, metadata

    def post_aggregated_list(self, response: compute.HealthChecksAggregatedList) -> compute.HealthChecksAggregatedList:
        """Rewrite the aggregated_list response before it reaches user code.

        Override in a subclass; must return the response.
        """
        return response

    def pre_delete(self, request: compute.DeleteHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteHealthCheckRequest, Sequence[Tuple[str, str]]]:
        """Rewrite the request/metadata for delete before sending.

        Override in a subclass; must return the (request, metadata) pair.
        """
        return request, metadata

    def post_delete(self, response: compute.Operation) -> compute.Operation:
        """Rewrite the delete response before it reaches user code.

        Override in a subclass; must return the response.
        """
        return response

    def pre_get(self, request: compute.GetHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetHealthCheckRequest, Sequence[Tuple[str, str]]]:
        """Rewrite the request/metadata for get before sending.

        Override in a subclass; must return the (request, metadata) pair.
        """
        return request, metadata

    def post_get(self, response: compute.HealthCheck) -> compute.HealthCheck:
        """Rewrite the get response before it reaches user code.

        Override in a subclass; must return the response.
        """
        return response

    def pre_insert(self, request: compute.InsertHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertHealthCheckRequest, Sequence[Tuple[str, str]]]:
        """Rewrite the request/metadata for insert before sending.

        Override in a subclass; must return the (request, metadata) pair.
        """
        return request, metadata

    def post_insert(self, response: compute.Operation) -> compute.Operation:
        """Rewrite the insert response before it reaches user code.

        Override in a subclass; must return the response.
        """
        return response

    def pre_list(self, request: compute.ListHealthChecksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListHealthChecksRequest, Sequence[Tuple[str, str]]]:
        """Rewrite the request/metadata for list before sending.

        Override in a subclass; must return the (request, metadata) pair.
        """
        return request, metadata

    def post_list(self, response: compute.HealthCheckList) -> compute.HealthCheckList:
        """Rewrite the list response before it reaches user code.

        Override in a subclass; must return the response.
        """
        return response

    def pre_patch(self, request: compute.PatchHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchHealthCheckRequest, Sequence[Tuple[str, str]]]:
        """Rewrite the request/metadata for patch before sending.

        Override in a subclass; must return the (request, metadata) pair.
        """
        return request, metadata

    def post_patch(self, response: compute.Operation) -> compute.Operation:
        """Rewrite the patch response before it reaches user code.

        Override in a subclass; must return the response.
        """
        return response

    def pre_update(self, request: compute.UpdateHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateHealthCheckRequest, Sequence[Tuple[str, str]]]:
        """Rewrite the request/metadata for update before sending.

        Override in a subclass; must return the (request, metadata) pair.
        """
        return request, metadata

    def post_update(self, response: compute.Operation) -> compute.Operation:
        """Rewrite the update response before it reaches user code.

        Override in a subclass; must return the response.
        """
        return response


@dataclasses.dataclass
class HealthChecksRestStub:
    # Shared plumbing injected into every per-RPC callable below.
    _session: AuthorizedSession
    _host: str
    _interceptor: HealthChecksRestInterceptor


class HealthChecksRestTransport(HealthChecksTransport):
    """REST backend transport for HealthChecks.

    The HealthChecks API.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends JSON representations of protocol buffers over HTTP/1.1

    NOTE: This REST transport functionality is currently in a beta
    state (preview). We welcome your feedback via an issue in this
    library's source repository. Thank you!
    """
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[HealthChecksRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or HealthChecksRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(HealthChecksRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListHealthChecksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.HealthChecksAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListHealthChecksRequest): + The request object. 
A request message for + HealthChecks.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.HealthChecksAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/healthChecks', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListHealthChecksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthChecksAggregatedList() + pb_resp = compute.HealthChecksAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(HealthChecksRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteHealthCheckRequest): + The request object. A request message for + HealthChecks.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/healthChecks/{health_check}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(HealthChecksRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.HealthCheck: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetHealthCheckRequest): + The request object. A request message for + HealthChecks.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.HealthCheck: + Represents a Health Check resource. Google Compute + Engine has two Health Check resources: \* + `Global `__ + \* + `Regional `__ + Internal HTTP(S) load balancers must use regional health + checks (``compute.v1.regionHealthChecks``). Traffic + Director must use global health checks + (``compute.v1.healthChecks``). Internal TCP/UDP load + balancers can use either regional or global health + checks (``compute.v1.regionHealthChecks`` or + ``compute.v1.healthChecks``). External HTTP(S), TCP + proxy, and SSL proxy load balancers as well as managed + instance group auto-healing must use global health + checks (``compute.v1.healthChecks``). 
Backend + service-based network load balancers must use regional + health checks (``compute.v1.regionHealthChecks``). + Target pool-based network load balancers must use legacy + HTTP health checks (``compute.v1.httpHealthChecks``). + For more information, see Health checks overview. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/healthChecks/{health_check}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthCheck() + pb_resp = compute.HealthCheck.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(HealthChecksRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertHealthCheckRequest): + The request object. A request message for + HealthChecks.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/healthChecks', + 'body': 'health_check_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(HealthChecksRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListHealthChecksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.HealthCheckList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListHealthChecksRequest): + The request object. A request message for + HealthChecks.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.HealthCheckList: + Contains a list of HealthCheck + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/healthChecks', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListHealthChecksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthCheckList() + pb_resp = compute.HealthCheckList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(HealthChecksRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. 
+ + Args: + request (~.compute.PatchHealthCheckRequest): + The request object. A request message for + HealthChecks.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/healthChecks/{health_check}', + 'body': 'health_check_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _Update(HealthChecksRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateHealthCheckRequest): + The request object. A request message for + HealthChecks.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/global/healthChecks/{health_check}', + 'body': 'health_check_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListHealthChecksRequest], + compute.HealthChecksAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteHealthCheckRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetHealthCheckRequest], + compute.HealthCheck]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertHealthCheckRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListHealthChecksRequest], + compute.HealthCheckList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchHealthCheckRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateHealthCheckRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'HealthChecksRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/__init__.py new file mode 100644 index 000000000..89c3ca2d8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import ImageFamilyViewsClient + +__all__ = ( + 'ImageFamilyViewsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/client.py new file mode 100644 index 000000000..05d732189 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/client.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.types import compute +from .transports.base import ImageFamilyViewsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ImageFamilyViewsRestTransport + + +class ImageFamilyViewsClientMeta(type): + """Metaclass for the ImageFamilyViews client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ImageFamilyViewsTransport]] + _transport_registry["rest"] = ImageFamilyViewsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ImageFamilyViewsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+ if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ImageFamilyViewsClient(metaclass=ImageFamilyViewsClientMeta): + """The ImageFamilyViews API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ImageFamilyViewsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ImageFamilyViewsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ImageFamilyViewsTransport: + """Returns the transport used by the client instance. + + Returns: + ImageFamilyViewsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ImageFamilyViewsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the image family views client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ImageFamilyViewsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ImageFamilyViewsTransport): + # transport is a ImageFamilyViewsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get(self, + request: Optional[Union[compute.GetImageFamilyViewRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + family: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.ImageFamilyView: + r"""Returns the latest image that is part of an image + family, is not deprecated and is rolled out in the + specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ImageFamilyViewsClient() + + # Initialize request argument(s) + request = compute_v1.GetImageFamilyViewRequest( + family="family_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetImageFamilyViewRequest, dict]): + The request object. A request message for + ImageFamilyViews.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + family (str): + Name of the image family to search + for. + + This corresponds to the ``family`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.ImageFamilyView: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, family]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetImageFamilyViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetImageFamilyViewRequest): + request = compute.GetImageFamilyViewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if family is not None: + request.family = family + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("family", request.family), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ImageFamilyViewsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ImageFamilyViewsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/__init__.py new file mode 100644 index 000000000..82cf2740d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ImageFamilyViewsTransport +from .rest import ImageFamilyViewsRestTransport +from .rest import ImageFamilyViewsRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ImageFamilyViewsTransport]] +_transport_registry['rest'] = ImageFamilyViewsRestTransport + +__all__ = ( + 'ImageFamilyViewsTransport', + 'ImageFamilyViewsRestTransport', + 'ImageFamilyViewsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/base.py new file mode 100644 index 000000000..765a497aa --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/base.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ImageFamilyViewsTransport(abc.ABC): + """Abstract transport class for ImageFamilyViews.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetImageFamilyViewRequest], + Union[ + compute.ImageFamilyView, + Awaitable[compute.ImageFamilyView] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'ImageFamilyViewsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/rest.py new file mode 100644 index 000000000..5331c8978 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/image_family_views/transports/rest.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ImageFamilyViewsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ImageFamilyViewsRestInterceptor: + """Interceptor for ImageFamilyViews. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ImageFamilyViewsRestTransport. + + .. 
code-block:: python + class MyCustomImageFamilyViewsInterceptor(ImageFamilyViewsRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ImageFamilyViewsRestTransport(interceptor=MyCustomImageFamilyViewsInterceptor()) + client = ImageFamilyViewsClient(transport=transport) + + + """ + def pre_get(self, request: compute.GetImageFamilyViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetImageFamilyViewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the ImageFamilyViews server. + """ + return request, metadata + + def post_get(self, response: compute.ImageFamilyView) -> compute.ImageFamilyView: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the ImageFamilyViews server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ImageFamilyViewsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ImageFamilyViewsRestInterceptor + + +class ImageFamilyViewsRestTransport(ImageFamilyViewsTransport): + """REST backend transport for ImageFamilyViews. + + The ImageFamilyViews API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ImageFamilyViewsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ImageFamilyViewsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Get(ImageFamilyViewsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetImageFamilyViewRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ImageFamilyView: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetImageFamilyViewRequest): + The request object. A request message for + ImageFamilyViews.Get. See the method + description for details.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ImageFamilyView: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/imageFamilyViews/{family}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetImageFamilyViewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ImageFamilyView() + pb_resp = compute.ImageFamilyView.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + @property + def get(self) -> Callable[ + [compute.GetImageFamilyViewRequest], + compute.ImageFamilyView]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ImageFamilyViewsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/images/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/__init__.py new file mode 100644 index 000000000..1beff6791 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ImagesClient + +__all__ = ( + 'ImagesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/images/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/client.py new file mode 100644 index 000000000..1495653e3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/client.py @@ -0,0 +1,2446 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.images import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import ImagesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ImagesRestTransport + + +class ImagesClientMeta(type): + """Metaclass for the Images client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ImagesTransport]] + _transport_registry["rest"] = ImagesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ImagesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ImagesClient(metaclass=ImagesClientMeta): + """The Images API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info.
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ImagesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ImagesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ImagesTransport: + """Returns the transport used by the client instance. + + Returns: + ImagesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ImagesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the images client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ImagesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ImagesTransport): + # transport is a ImagesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified image. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteImageRequest, dict]): + The request object. A request message for Images.Delete. 
+ See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image (str): + Name of the image resource to delete. + This corresponds to the ``image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, image]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteImageRequest): + request = compute.DeleteImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image is not None: + request.image = image + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("image", request.image), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified image. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteImageRequest, dict]): + The request object. A request message for Images.Delete. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image (str): + Name of the image resource to delete. 
+ This corresponds to the ``image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, image]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteImageRequest): + request = compute.DeleteImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image is not None: + request.image = image + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("image", request.image), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def deprecate_unary(self, + request: Optional[Union[compute.DeprecateImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image: Optional[str] = None, + deprecation_status_resource: Optional[compute.DeprecationStatus] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the deprecation status of an image. If an empty + request body is given, clears the deprecation status + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_deprecate(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeprecateImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.deprecate(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeprecateImageRequest, dict]): + The request object. A request message for + Images.Deprecate. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image (str): + Image name. + This corresponds to the ``image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deprecation_status_resource (google.cloud.compute_v1.types.DeprecationStatus): + The body resource for this request + This corresponds to the ``deprecation_status_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, image, deprecation_status_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeprecateImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeprecateImageRequest): + request = compute.DeprecateImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image is not None: + request.image = image + if deprecation_status_resource is not None: + request.deprecation_status_resource = deprecation_status_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deprecate] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("image", request.image), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def deprecate(self, + request: Optional[Union[compute.DeprecateImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image: Optional[str] = None, + deprecation_status_resource: Optional[compute.DeprecationStatus] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the deprecation status of an image. 
If an empty + request body is given, clears the deprecation status + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_deprecate(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeprecateImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.deprecate(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeprecateImageRequest, dict]): + The request object. A request message for + Images.Deprecate. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image (str): + Image name. + This corresponds to the ``image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deprecation_status_resource (google.cloud.compute_v1.types.DeprecationStatus): + The body resource for this request + This corresponds to the ``deprecation_status_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, image, deprecation_status_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeprecateImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeprecateImageRequest): + request = compute.DeprecateImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image is not None: + request.image = image + if deprecation_status_resource is not None: + request.deprecation_status_resource = deprecation_status_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deprecate] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("image", request.image), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Image: + r"""Returns the specified image. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetImageRequest, dict]): + The request object. A request message for Images.Get. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image (str): + Name of the image resource to return. + This corresponds to the ``image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Image: + Represents an Image resource. You can + use images to create boot disks for your + VM instances. For more information, read + Images. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, image]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetImageRequest): + request = compute.GetImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image is not None: + request.image = image + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("image", request.image), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_from_family(self, + request: Optional[Union[compute.GetFromFamilyImageRequest, dict]] = None, + *, + project: Optional[str] = None, + family: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Image: + r"""Returns the latest image that is part of an image + family and is not deprecated. For more information on + image families, see Public image families documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_from_family(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetFromFamilyImageRequest( + family="family_value", + project="project_value", + ) + + # Make the request + response = client.get_from_family(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetFromFamilyImageRequest, dict]): + The request object. A request message for + Images.GetFromFamily. See the method + description for details. + project (str): + The image project that the image + belongs to. For example, to get a CentOS + image, specify centos-cloud as the image + project. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + family (str): + Name of the image family to search + for. + + This corresponds to the ``family`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Image: + Represents an Image resource. You can + use images to create boot disks for your + VM instances. For more information, read + Images. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, family]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetFromFamilyImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetFromFamilyImageRequest): + request = compute.GetFromFamilyImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if family is not None: + request.family = family + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_from_family] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("family", request.family), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyImageRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyImageRequest, dict]): + The request object. A request message for + Images.GetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). 
A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyImageRequest): + request = compute.GetIamPolicyImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image_resource: Optional[compute.Image] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an image in the specified project using the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.InsertImageRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertImageRequest, dict]): + The request object. A request message for Images.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image_resource (google.cloud.compute_v1.types.Image): + The body resource for this request + This corresponds to the ``image_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, image_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertImageRequest): + request = compute.InsertImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image_resource is not None: + request.image_resource = image_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image_resource: Optional[compute.Image] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an image in the specified project using the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.InsertImageRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertImageRequest, dict]): + The request object. A request message for Images.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image_resource (google.cloud.compute_v1.types.Image): + The body resource for this request + This corresponds to the ``image_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, image_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertImageRequest): + request = compute.InsertImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image_resource is not None: + request.image_resource = image_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListImagesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of custom images available to the + specified project. Custom images are images you create + that belong to your project. This method does not get + any images that belong to other projects, including + publicly-available images, like Debian 8. If you want to + get a list of publicly-available images, use this method + to make a request to the respective image project, such + as debian-cloud or windows-cloud. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.ListImagesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListImagesRequest, dict]): + The request object. A request message for Images.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.images.pagers.ListPager: + Contains a list of images. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListImagesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.ListImagesRequest): + request = compute.ListImagesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image: Optional[str] = None, + image_resource: Optional[compute.Image] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified image with the data included in + the request. Only the following fields can be modified: + family, description, deprecation status. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.PatchImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchImageRequest, dict]): + The request object. A request message for Images.Patch. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image (str): + Name of the image resource to patch. + This corresponds to the ``image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image_resource (google.cloud.compute_v1.types.Image): + The body resource for this request + This corresponds to the ``image_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, image, image_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchImageRequest): + request = compute.PatchImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image is not None: + request.image = image + if image_resource is not None: + request.image_resource = image_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("image", request.image), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchImageRequest, dict]] = None, + *, + project: Optional[str] = None, + image: Optional[str] = None, + image_resource: Optional[compute.Image] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified image with the data included in + the request. Only the following fields can be modified: + family, description, deprecation status. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.PatchImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchImageRequest, dict]): + The request object. A request message for Images.Patch. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image (str): + Name of the image resource to patch. + This corresponds to the ``image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + image_resource (google.cloud.compute_v1.types.Image): + The body resource for this request + This corresponds to the ``image_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, image, image_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchImageRequest): + request = compute.PatchImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if image is not None: + request.image = image + if image_resource is not None: + request.image_resource = image_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("image", request.image), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyImageRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_policy_request_resource: Optional[compute.GlobalSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyImageRequest, dict]): + The request object. 
A request message for + Images.SetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyImageRequest): + request = compute.SetIamPolicyImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_policy_request_resource is not None: + request.global_set_policy_request_resource = global_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsImageRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on an image. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsImageRequest, dict]): + The request object. A request message for + Images.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsImageRequest): + request = compute.SetLabelsImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsImageRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on an image. 
To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsImageRequest, dict]): + The request object. A request message for + Images.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsImageRequest): + request = compute.SetLabelsImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsImageRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsImageRequest, dict]): + The request object. A request message for + Images.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsImageRequest): + request = compute.TestIamPermissionsImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ImagesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ImagesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/images/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/pagers.py new file mode 100644 index 000000000..fccd48460 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ImageList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ImageList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.ImageList], + request: compute.ListImagesRequest, + response: compute.ImageList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListImagesRequest): + The initial request object. + response (google.cloud.compute_v1.types.ImageList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListImagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ImageList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Image]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/__init__.py new file mode 100644 index 000000000..754ae235d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ImagesTransport +from .rest import ImagesRestTransport +from .rest import ImagesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ImagesTransport]] +_transport_registry['rest'] = ImagesRestTransport + +__all__ = ( + 'ImagesTransport', + 'ImagesRestTransport', + 'ImagesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/base.py new file mode 100644 index 000000000..2d76b2875 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/base.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ImagesTransport(abc.ABC): + """Abstract transport class for Images.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.deprecate: gapic_v1.method.wrap_method( + self.deprecate, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_from_family: gapic_v1.method.wrap_method( + self.get_from_family, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteImageRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def deprecate(self) -> Callable[ + [compute.DeprecateImageRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetImageRequest], + Union[ + compute.Image, + Awaitable[compute.Image] + ]]: + raise NotImplementedError() + + @property + def get_from_family(self) -> Callable[ + [compute.GetFromFamilyImageRequest], + Union[ + compute.Image, + Awaitable[compute.Image] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyImageRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertImageRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListImagesRequest], + Union[ + compute.ImageList, + Awaitable[compute.ImageList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchImageRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyImageRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsImageRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsImageRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + 
]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'ImagesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/rest.py new file mode 100644 index 000000000..6721781f0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/images/transports/rest.py @@ -0,0 +1,1609 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ImagesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ImagesRestInterceptor: + """Interceptor for Images. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ImagesRestTransport. + + .. 
code-block:: python + class MyCustomImagesInterceptor(ImagesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_deprecate(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deprecate(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_from_family(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_from_family(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + 
+ def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ImagesRestTransport(interceptor=MyCustomImagesInterceptor()) + client = ImagesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. + """ + return response + def pre_deprecate(self, request: compute.DeprecateImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeprecateImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for deprecate + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_deprecate(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for deprecate + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_get(self, response: compute.Image) -> compute.Image: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. + """ + return response + def pre_get_from_family(self, request: compute.GetFromFamilyImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetFromFamilyImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_from_family + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_get_from_family(self, response: compute.Image) -> compute.Image: + """Post-rpc interceptor for get_from_family + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListImagesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListImagesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_list(self, response: compute.ImageList) -> compute.ImageList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Images server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Images server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ImagesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ImagesRestInterceptor + + +class ImagesRestTransport(ImagesTransport): + """REST backend transport for Images. + + The Images API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ImagesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ImagesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(ImagesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteImageRequest): + The request object. A request message for Images.Delete. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource.
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/images/{image}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Deprecate(ImagesRestStub): + def __hash__(self): + return hash("Deprecate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeprecateImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the deprecate method over HTTP. + + Args: + request (~.compute.DeprecateImageRequest): + The request object. A request message for + Images.Deprecate. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/images/{image}/deprecate', + 'body': 'deprecation_status_resource', + }, + ] + request, metadata = self._interceptor.pre_deprecate(request, metadata) + pb_request = compute.DeprecateImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_deprecate(resp) + return resp + + class _Get(ImagesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Image: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetImageRequest): + The request object. A request message for Images.Get. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Image: + Represents an Image resource. You can + use images to create boot disks for your + VM instances. For more information, read + Images. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/images/{image}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Image() + pb_resp = compute.Image.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetFromFamily(ImagesRestStub): + def __hash__(self): + return hash("GetFromFamily") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetFromFamilyImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Image: + r"""Call the get from family method over HTTP. 
+ + Args: + request (~.compute.GetFromFamilyImageRequest): + The request object. A request message for + Images.GetFromFamily. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Image: + Represents an Image resource. You can + use images to create boot disks for your + VM instances. For more information, read + Images. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/images/family/{family}', + }, + ] + request, metadata = self._interceptor.pre_get_from_family(request, metadata) + pb_request = compute.GetFromFamilyImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Image() + pb_resp = compute.Image.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_from_family(resp) + return resp + + class _GetIamPolicy(ImagesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyImageRequest): + The request object. A request message for + Images.GetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/images/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(ImagesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertImageRequest): + The request object. A request message for Images.Insert. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/images', + 'body': 'image_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(ImagesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListImagesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ImageList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListImagesRequest): + The request object. A request message for Images.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ImageList: + Contains a list of images. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/images', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListImagesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ImageList() + pb_resp = compute.ImageList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(ImagesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. 
+ + Args: + request (~.compute.PatchImageRequest): + The request object. A request message for Images.Patch. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/images/{image}', + 'body': 'image_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetIamPolicy(ImagesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyImageRequest): + The request object. A request message for + Images.SetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/images/{resource}/setIamPolicy', + 'body': 'global_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _SetLabels(ImagesRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsImageRequest): + The request object. A request message for + Images.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/images/{resource}/setLabels', + 'body': 'global_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _TestIamPermissions(ImagesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsImageRequest): + The request object. A request message for + Images.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/images/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteImageRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def deprecate(self) -> Callable[ + [compute.DeprecateImageRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Deprecate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetImageRequest], + compute.Image]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_from_family(self) -> Callable[ + [compute.GetFromFamilyImageRequest], + compute.Image]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetFromFamily(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyImageRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertImageRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListImagesRequest], + compute.ImageList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchImageRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyImageRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsImageRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsImageRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ImagesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/__init__.py new file mode 100644 index 000000000..915b51e4d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import InstanceGroupManagersClient + +__all__ = ( + 'InstanceGroupManagersClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/client.py new file mode 100644 index 000000000..c9f1c3fb5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -0,0 +1,5446 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.instance_group_managers import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import InstanceGroupManagersTransport, DEFAULT_CLIENT_INFO +from .transports.rest import InstanceGroupManagersRestTransport + + +class InstanceGroupManagersClientMeta(type): + """Metaclass for the InstanceGroupManagers client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
 transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceGroupManagersTransport]] + _transport_registry["rest"] = InstanceGroupManagersRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[InstanceGroupManagersTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InstanceGroupManagersClient(metaclass=InstanceGroupManagersClientMeta): + """The InstanceGroupManagers API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceGroupManagersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceGroupManagersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstanceGroupManagersTransport: + """Returns the transport used by the client instance. + + Returns: + InstanceGroupManagersTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InstanceGroupManagersTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance group managers client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, InstanceGroupManagersTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, InstanceGroupManagersTransport): + # transport is a InstanceGroupManagersTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def abandon_instances_unary(self, + request: Optional[Union[compute.AbandonInstancesInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_abandon_instances_request_resource: Optional[compute.InstanceGroupManagersAbandonInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Flags the specified instances to be removed from the + managed instance group. Abandoning an instance does not + delete the instance, but it does remove the instance + from any target pools that are applied by the managed + instance group. This method reduces the targetSize of + the managed instance group by the number of instances + that you abandon. This operation is marked as DONE when + the action is scheduled even if the instances have not + yet been removed from the group. You must separately + verify the status of the abandoning action with the + listmanagedinstances method. 
If the group is part of a + backend service that has enabled connection draining, it + can take up to 60 seconds after the connection draining + duration has elapsed before the VM instance is removed + or deleted. You can specify a maximum of 1000 instances + with this method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_abandon_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.AbandonInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.abandon_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest, dict]): + The request object. Messages + + A request message for + InstanceGroupManagers.AbandonInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. 
+ + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_abandon_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersAbandonInstancesRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_abandon_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_abandon_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AbandonInstancesInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AbandonInstancesInstanceGroupManagerRequest): + request = compute.AbandonInstancesInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_abandon_instances_request_resource is not None: + request.instance_group_managers_abandon_instances_request_resource = instance_group_managers_abandon_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.abandon_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def abandon_instances(self, + request: Optional[Union[compute.AbandonInstancesInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_abandon_instances_request_resource: Optional[compute.InstanceGroupManagersAbandonInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Flags the specified instances to be removed from the + managed instance group. Abandoning an instance does not + delete the instance, but it does remove the instance + from any target pools that are applied by the managed + instance group. This method reduces the targetSize of + the managed instance group by the number of instances + that you abandon. 
This operation is marked as DONE when + the action is scheduled even if the instances have not + yet been removed from the group. You must separately + verify the status of the abandoning action with the + listmanagedinstances method. If the group is part of a + backend service that has enabled connection draining, it + can take up to 60 seconds after the connection draining + duration has elapsed before the VM instance is removed + or deleted. You can specify a maximum of 1000 instances + with this method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_abandon_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.AbandonInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.abandon_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest, dict]): + The request object. Messages + + A request message for + InstanceGroupManagers.AbandonInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_abandon_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersAbandonInstancesRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_abandon_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_abandon_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AbandonInstancesInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.AbandonInstancesInstanceGroupManagerRequest): + request = compute.AbandonInstancesInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_abandon_instances_request_resource is not None: + request.instance_group_managers_abandon_instances_request_resource = instance_group_managers_abandon_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.abandon_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of managed instance groups and + groups them by zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInstanceGroupManagersRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListInstanceGroupManagersRequest, dict]): + The request object. A request message for + InstanceGroupManagers.AggregatedList. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instance_group_managers.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListInstanceGroupManagersRequest): + request = compute.AggregatedListInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def apply_updates_to_instances_unary(self, + request: Optional[Union[compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_apply_updates_request_resource: Optional[compute.InstanceGroupManagersApplyUpdatesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Applies changes to selected instances on the managed + instance group. This method can be used to apply new + overrides and/or new versions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_apply_updates_to_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ApplyUpdatesToInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.apply_updates_to_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ApplyUpdatesToInstancesInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.ApplyUpdatesToInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + Should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group, should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_apply_updates_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersApplyUpdatesRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_apply_updates_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_apply_updates_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest): + request = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_apply_updates_request_resource is not None: + request.instance_group_managers_apply_updates_request_resource = instance_group_managers_apply_updates_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.apply_updates_to_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def apply_updates_to_instances(self, + request: Optional[Union[compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_apply_updates_request_resource: Optional[compute.InstanceGroupManagersApplyUpdatesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Applies changes to selected instances on the managed + instance group. This method can be used to apply new + overrides and/or new versions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_apply_updates_to_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ApplyUpdatesToInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.apply_updates_to_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ApplyUpdatesToInstancesInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.ApplyUpdatesToInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + Should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group, should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_apply_updates_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersApplyUpdatesRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_apply_updates_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_apply_updates_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest): + request = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_apply_updates_request_resource is not None: + request.instance_group_managers_apply_updates_request_resource = instance_group_managers_apply_updates_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.apply_updates_to_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def create_instances_unary(self, + request: Optional[Union[compute.CreateInstancesInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_create_instances_request_resource: Optional[compute.InstanceGroupManagersCreateInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates instances with per-instance configurations in + this managed instance group. Instances are created using + the current instance template. The create instances + operation is marked DONE if the createInstances request + is successful. The underlying actions take additional + time. You must separately verify the status of the + creating or actions with the listmanagedinstances + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_create_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.CreateInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.create_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CreateInstancesInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.CreateInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. It + should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_create_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersCreateInstancesRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_create_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_create_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CreateInstancesInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.CreateInstancesInstanceGroupManagerRequest): + request = compute.CreateInstancesInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_create_instances_request_resource is not None: + request.instance_group_managers_create_instances_request_resource = instance_group_managers_create_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_instances(self, + request: Optional[Union[compute.CreateInstancesInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_create_instances_request_resource: Optional[compute.InstanceGroupManagersCreateInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates instances with per-instance configurations in + this managed instance group. Instances are created using + the current instance template. The create instances + operation is marked DONE if the createInstances request + is successful. The underlying actions take additional + time. You must separately verify the status of the + creating or actions with the listmanagedinstances + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_create_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.CreateInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.create_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CreateInstancesInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.CreateInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. It + should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_create_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersCreateInstancesRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_create_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_create_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CreateInstancesInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.CreateInstancesInstanceGroupManagerRequest): + request = compute.CreateInstancesInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_create_instances_request_resource is not None: + request.instance_group_managers_create_instances_request_resource = instance_group_managers_create_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified managed instance group and all + of the instances in that group. Note that the instance + group must not belong to a backend service. Read + Deleting an instance group for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group to delete. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInstanceGroupManagerRequest): + request = compute.DeleteInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
    def delete(self,
            request: Optional[Union[compute.DeleteInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified managed instance group and all
        of the instances in that group. Note that the instance
        group must not belong to a backend service. Read
        Deleting an instance group for more information.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.InstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                InstanceGroupManagers.Delete. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group to delete.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance_group_manager])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteInstanceGroupManagerRequest):
            request = compute.DeleteInstanceGroupManagerRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if instance_group_manager is not None:
                request.instance_group_manager = instance_group_manager

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a "get operation" callable against the zone operations
        # service so the ExtendedOperation returned below can poll the
        # raw Operation (identified by response.name) for completion.
        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def delete_instances_unary(self,
            request: Optional[Union[compute.DeleteInstancesInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            instance_group_managers_delete_instances_request_resource: Optional[compute.InstanceGroupManagersDeleteInstancesRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Flags the specified instances in the managed instance
        group for immediate deletion. The instances are also
        removed from any target pools of which they were a
        member. This method reduces the targetSize of the
        managed instance group by the number of instances that
        you delete. This operation is marked as DONE when the
        action is scheduled even if the instances are still
        being deleted. You must separately verify the status of
        the deleting action with the listmanagedinstances
        method. If the group is part of a backend service that
        has enabled connection draining, it can take up to 60
        seconds after the connection draining duration has
        elapsed before the VM instance is removed or deleted.
        You can specify a maximum of 1000 instances with this
        method per request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete_instances():
                # Create a client
                client = compute_v1.InstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteInstancesInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.delete_instances(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteInstancesInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                InstanceGroupManagers.DeleteInstances.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_managers_delete_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersDeleteInstancesRequest):
                The body resource for this request
                This corresponds to the ``instance_group_managers_delete_instances_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation response from the API. Unlike
                :meth:`delete_instances`, this "unary" variant does
                not wrap the result in an ExtendedOperation and
                does not poll for completion; callers must track
                the operation's status themselves.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_delete_instances_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteInstancesInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteInstancesInstanceGroupManagerRequest):
            request = compute.DeleteInstancesInstanceGroupManagerRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if instance_group_manager is not None:
                request.instance_group_manager = instance_group_manager
            if instance_group_managers_delete_instances_request_resource is not None:
                request.instance_group_managers_delete_instances_request_resource = instance_group_managers_delete_instances_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_instances]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def delete_instances(self,
            request: Optional[Union[compute.DeleteInstancesInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            instance_group_managers_delete_instances_request_resource: Optional[compute.InstanceGroupManagersDeleteInstancesRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Flags the specified instances in the managed instance
        group for immediate deletion. The instances are also
        removed from any target pools of which they were a
        member. This method reduces the targetSize of the
        managed instance group by the number of instances that
        you delete. This operation is marked as DONE when the
        action is scheduled even if the instances are still
        being deleted. You must separately verify the status of
        the deleting action with the listmanagedinstances
        method. If the group is part of a backend service that
        has enabled connection draining, it can take up to 60
        seconds after the connection draining duration has
        elapsed before the VM instance is removed or deleted.
        You can specify a maximum of 1000 instances with this
        method per request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete_instances():
                # Create a client
                client = compute_v1.InstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteInstancesInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.delete_instances(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteInstancesInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                InstanceGroupManagers.DeleteInstances.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_managers_delete_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersDeleteInstancesRequest):
                The body resource for this request
                This corresponds to the ``instance_group_managers_delete_instances_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_delete_instances_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteInstancesInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteInstancesInstanceGroupManagerRequest):
            request = compute.DeleteInstancesInstanceGroupManagerRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if instance_group_manager is not None:
                request.instance_group_manager = instance_group_manager
            if instance_group_managers_delete_instances_request_resource is not None:
                request.instance_group_managers_delete_instances_request_resource = instance_group_managers_delete_instances_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_instances]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a "get operation" callable against the zone operations
        # service so the ExtendedOperation returned below can poll the
        # raw Operation (identified by response.name) for completion.
        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def delete_per_instance_configs_unary(self,
            request: Optional[Union[compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            instance_group_managers_delete_per_instance_configs_req_resource: Optional[compute.InstanceGroupManagersDeletePerInstanceConfigsReq] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Deletes selected per-instance configurations for the
        managed instance group.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete_per_instance_configs():
                # Create a client
                client = compute_v1.InstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.DeletePerInstanceConfigsInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.delete_per_instance_configs(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeletePerInstanceConfigsInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                InstanceGroupManagers.DeletePerInstanceConfigs.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located. It
                should conform to RFC1035.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. It should conform to RFC1035.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_managers_delete_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersDeletePerInstanceConfigsReq):
                The body resource for this request
                This corresponds to the ``instance_group_managers_delete_per_instance_configs_req_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation response from the API. Unlike
                :meth:`delete_per_instance_configs`, this "unary"
                variant does not wrap the result in an
                ExtendedOperation and does not poll for completion;
                callers must track the operation's status
                themselves.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_delete_per_instance_configs_req_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeletePerInstanceConfigsInstanceGroupManagerRequest):
            request = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if instance_group_manager is not None:
                request.instance_group_manager = instance_group_manager
            if instance_group_managers_delete_per_instance_configs_req_resource is not None:
                request.instance_group_managers_delete_per_instance_configs_req_resource = instance_group_managers_delete_per_instance_configs_req_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_per_instance_configs]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def delete_per_instance_configs(self,
            request: Optional[Union[compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            instance_group_managers_delete_per_instance_configs_req_resource: Optional[compute.InstanceGroupManagersDeletePerInstanceConfigsReq] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes selected per-instance configurations for the
        managed instance group.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete_per_instance_configs():
                # Create a client
                client = compute_v1.InstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.DeletePerInstanceConfigsInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.delete_per_instance_configs(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeletePerInstanceConfigsInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                InstanceGroupManagers.DeletePerInstanceConfigs.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located. It
                should conform to RFC1035.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. It should conform to RFC1035.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_managers_delete_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersDeletePerInstanceConfigsReq):
                The body resource for this request
                This corresponds to the ``instance_group_managers_delete_per_instance_configs_req_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_delete_per_instance_configs_req_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeletePerInstanceConfigsInstanceGroupManagerRequest):
            request = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if instance_group_manager is not None:
                request.instance_group_manager = instance_group_manager
            if instance_group_managers_delete_per_instance_configs_req_resource is not None:
                request.instance_group_managers_delete_per_instance_configs_req_resource = instance_group_managers_delete_per_instance_configs_req_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_per_instance_configs]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a "get operation" callable against the zone operations
        # service so the ExtendedOperation returned below can poll the
        # raw Operation (identified by response.name) for completion.
        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def get(self,
            request: Optional[Union[compute.GetInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.InstanceGroupManager:
        r"""Returns all of the details about the specified
        managed instance group.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.InstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.GetInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                InstanceGroupManagers.Get. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.InstanceGroupManager:
                Represents a Managed Instance Group
                resource. An instance group is a
                collection of VM instances that you can
                manage as a single entity. For more
                information, read Instance groups. For
                zonal Managed Instance Group, use the
                instanceGroupManagers resource. For
                regional Managed Instance Group, use the
                regionInstanceGroupManagers resource.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance_group_manager])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetInstanceGroupManagerRequest):
            request = compute.GetInstanceGroupManagerRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if instance_group_manager is not None:
                request.instance_group_manager = instance_group_manager

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the InstanceGroupManager resource.
        return response

    def insert_unary(self,
            request: Optional[Union[compute.InsertInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance_group_manager_resource: Optional[compute.InstanceGroupManager] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates a managed instance group using the
        information that you specify in the request. After the
        group is created, instances in the group are created
        using the specified instance template. This operation is
        marked as DONE when the group is created even if the
        instances in the group have not yet been created. You
        must separately verify the status of the individual
        instances with the listmanagedinstances method. A
        managed instance group can have up to 1000 VM instances
        per group. Please contact Cloud Support if you need an
        increase in this limit.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_insert():
+                # Create a client
+                client = compute_v1.InstanceGroupManagersClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.InsertInstanceGroupManagerRequest(
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.InsertInstanceGroupManagerRequest, dict]):
+                The request object. A request message for
+                InstanceGroupManagers.Insert. See the
+                method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone where you want
+                to create the managed instance group.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager):
+                The body resource for this request
+                This corresponds to the ``instance_group_manager_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInstanceGroupManagerRequest): + request = compute.InsertInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager_resource is not None: + request.instance_group_manager_resource = instance_group_manager_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def insert(self,
+            request: Optional[Union[compute.InsertInstanceGroupManagerRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            zone: Optional[str] = None,
+            instance_group_manager_resource: Optional[compute.InstanceGroupManager] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> extended_operation.ExtendedOperation:
+        r"""Creates a managed instance group using the
+        information that you specify in the request. After the
+        group is created, instances in the group are created
+        using the specified instance template. This operation is
+        marked as DONE when the group is created even if the
+        instances in the group have not yet been created. You
+        must separately verify the status of the individual
+        instances with the listmanagedinstances method. A
+        managed instance group can have up to 1000 VM instances
+        per group. Please contact Cloud Support if you need an
+        increase in this limit.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_insert():
+                # Create a client
+                client = compute_v1.InstanceGroupManagersClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.InsertInstanceGroupManagerRequest(
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                response = client.insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.InsertInstanceGroupManagerRequest, dict]):
+                The request object. A request message for
+                InstanceGroupManagers.Insert. See the
+                method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone where you want
+                to create the managed instance group.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager):
+                The body resource for this request
+                This corresponds to the ``instance_group_manager_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, instance_group_manager_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.InsertInstanceGroupManagerRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+ if not isinstance(request, compute.InsertInstanceGroupManagerRequest): + request = compute.InsertInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager_resource is not None: + request.instance_group_manager_resource = instance_group_manager_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of managed instance groups that are + contained within the specified project and zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListInstanceGroupManagersRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInstanceGroupManagersRequest, dict]): + The request object. A request message for + InstanceGroupManagers.List. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instance_group_managers.pagers.ListPager: + [Output Only] A list of managed instance groups. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInstanceGroupManagersRequest): + request = compute.ListInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_errors(self, + request: Optional[Union[compute.ListErrorsInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListErrorsPager: + r"""Lists all errors thrown by actions on instances for a + given managed instance group. The filter and orderBy + query parameters are not supported. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_list_errors():
+                # Create a client
+                client = compute_v1.InstanceGroupManagersClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.ListErrorsInstanceGroupManagersRequest(
+                    instance_group_manager="instance_group_manager_value",
+                    project="project_value",
+                    zone="zone_value",
+                )
+
+                # Make the request
+                page_result = client.list_errors(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.ListErrorsInstanceGroupManagersRequest, dict]):
+                The request object. A request message for
+                InstanceGroupManagers.ListErrors. See
+                the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone where the
+                managed instance group is located. It
+                should conform to RFC1035.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_group_manager (str):
+                The name of the managed instance group. It must be a
+                string that meets the requirements in RFC1035, or an
+                unsigned long integer: must match regexp pattern:
+                ``(?:[a-z](?:[-a-z0-9]{0,57}[a-z0-9])?)|[1-9][0-9]{0,19}``.
+
+                This corresponds to the ``instance_group_manager`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.cloud.compute_v1.services.instance_group_managers.pagers.ListErrorsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListErrorsInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListErrorsInstanceGroupManagersRequest): + request = compute.ListErrorsInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_errors] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListErrorsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_managed_instances(self, + request: Optional[Union[compute.ListManagedInstancesInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListManagedInstancesPager: + r"""Lists all of the instances in the managed instance group. Each + instance in the list has a currentAction, which indicates the + action that the managed instance group is performing on the + instance. For example, if the group is still creating an + instance, the currentAction is CREATING. If a previous action + failed, the list displays the errors for that failed action. The + orderBy query parameter is not supported. The ``pageToken`` + query parameter is supported only in the alpha and beta API and + only if the group's ``listManagedInstancesResults`` field is set + to ``PAGINATED``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_managed_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListManagedInstancesInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_managed_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListManagedInstancesInstanceGroupManagersRequest, dict]): + The request object. A request message for + InstanceGroupManagers.ListManagedInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.services.instance_group_managers.pagers.ListManagedInstancesPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListManagedInstancesInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListManagedInstancesInstanceGroupManagersRequest): + request = compute.ListManagedInstancesInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_managed_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListManagedInstancesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_per_instance_configs(self, + request: Optional[Union[compute.ListPerInstanceConfigsInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPerInstanceConfigsPager: + r"""Lists all of the per-instance configurations defined + for the managed instance group. The orderBy query + parameter is not supported. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListPerInstanceConfigsInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_per_instance_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListPerInstanceConfigsInstanceGroupManagersRequest, dict]): + The request object. A request message for + InstanceGroupManagers.ListPerInstanceConfigs. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. It + should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.services.instance_group_managers.pagers.ListPerInstanceConfigsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListPerInstanceConfigsInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListPerInstanceConfigsInstanceGroupManagersRequest): + request = compute.ListPerInstanceConfigsInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPerInstanceConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_manager_resource: Optional[compute.InstanceGroupManager] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates a managed instance group using the + information that you specify in the request. This + operation is marked as DONE when the group is patched + even if the instances in the group are still in the + process of being patched. You must separately verify the + status of the individual instances with the + listManagedInstances method. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. If you update your group to specify a + new template or instance configuration, it's possible + that your intended specification for each VM in the + group is different from the current state of that VM. To + learn how to apply an updated configuration to the VMs + in a MIG, see Updating instances in a MIG. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where you want + to create the managed instance group. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the instance group + manager. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + This corresponds to the ``instance_group_manager_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_manager_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchInstanceGroupManagerRequest): + request = compute.PatchInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_manager_resource is not None: + request.instance_group_manager_resource = instance_group_manager_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch(self, + request: Optional[Union[compute.PatchInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_manager_resource: Optional[compute.InstanceGroupManager] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates a managed instance group using the + information that you specify in the request. This + operation is marked as DONE when the group is patched + even if the instances in the group are still in the + process of being patched. You must separately verify the + status of the individual instances with the + listManagedInstances method. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. If you update your group to specify a + new template or instance configuration, it's possible + that your intended specification for each VM in the + group is different from the current state of that VM. To + learn how to apply an updated configuration to the VMs + in a MIG, see Updating instances in a MIG. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where you want + to create the managed instance group. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the instance group + manager. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + This corresponds to the ``instance_group_manager_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_manager_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchInstanceGroupManagerRequest): + request = compute.PatchInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_manager_resource is not None: + request.instance_group_manager_resource = instance_group_manager_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def patch_per_instance_configs_unary(self, + request: Optional[Union[compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_patch_per_instance_configs_req_resource: Optional[compute.InstanceGroupManagersPatchPerInstanceConfigsReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Inserts or patches per-instance configurations for + the managed instance group. perInstanceConfig.name + serves as a key used to distinguish whether to perform + insert or patch. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchPerInstanceConfigsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchPerInstanceConfigsInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.PatchPerInstanceConfigs. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. It + should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance_group_managers_patch_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersPatchPerInstanceConfigsReq): + The body resource for this request + This corresponds to the ``instance_group_managers_patch_per_instance_configs_req_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_patch_per_instance_configs_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchPerInstanceConfigsInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchPerInstanceConfigsInstanceGroupManagerRequest): + request = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_patch_per_instance_configs_req_resource is not None: + request.instance_group_managers_patch_per_instance_configs_req_resource = instance_group_managers_patch_per_instance_configs_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_per_instance_configs(self, + request: Optional[Union[compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_patch_per_instance_configs_req_resource: Optional[compute.InstanceGroupManagersPatchPerInstanceConfigsReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Inserts or patches per-instance configurations for + the managed instance group. perInstanceConfig.name + serves as a key used to distinguish whether to perform + insert or patch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchPerInstanceConfigsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchPerInstanceConfigsInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.PatchPerInstanceConfigs. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. It + should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance_group_managers_patch_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersPatchPerInstanceConfigsReq): + The body resource for this request + This corresponds to the ``instance_group_managers_patch_per_instance_configs_req_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_patch_per_instance_configs_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchPerInstanceConfigsInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchPerInstanceConfigsInstanceGroupManagerRequest): + request = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_patch_per_instance_configs_req_resource is not None: + request.instance_group_managers_patch_per_instance_configs_req_resource = instance_group_managers_patch_per_instance_configs_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def recreate_instances_unary(self, + request: Optional[Union[compute.RecreateInstancesInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_recreate_instances_request_resource: Optional[compute.InstanceGroupManagersRecreateInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Flags the specified VM instances in the managed + instance group to be immediately recreated. Each + instance is recreated using the group's current + configuration. This operation is marked as DONE when the + flag is set even if the instances have not yet been + recreated. You must separately verify the status of each + instance by checking its currentAction field; for more + information, see Checking the status of managed + instances. If the group is part of a backend service + that has enabled connection draining, it can take up to + 60 seconds after the connection draining duration has + elapsed before the VM instance is removed or deleted. + You can specify a maximum of 1000 instances with this + method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_recreate_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.RecreateInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.recreate_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RecreateInstancesInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.RecreateInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_recreate_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersRecreateInstancesRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_recreate_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_recreate_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RecreateInstancesInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RecreateInstancesInstanceGroupManagerRequest): + request = compute.RecreateInstancesInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_recreate_instances_request_resource is not None: + request.instance_group_managers_recreate_instances_request_resource = instance_group_managers_recreate_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.recreate_instances] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def recreate_instances(self, + request: Optional[Union[compute.RecreateInstancesInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_recreate_instances_request_resource: Optional[compute.InstanceGroupManagersRecreateInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Flags the specified VM instances in the managed + instance group to be immediately recreated. Each + instance is recreated using the group's current + configuration. This operation is marked as DONE when the + flag is set even if the instances have not yet been + recreated. You must separately verify the status of each + instance by checking its currentAction field; for more + information, see Checking the status of managed + instances. If the group is part of a backend service + that has enabled connection draining, it can take up to + 60 seconds after the connection draining duration has + elapsed before the VM instance is removed or deleted. + You can specify a maximum of 1000 instances with this + method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_recreate_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.RecreateInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.recreate_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RecreateInstancesInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.RecreateInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_recreate_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersRecreateInstancesRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_recreate_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_recreate_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RecreateInstancesInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RecreateInstancesInstanceGroupManagerRequest): + request = compute.RecreateInstancesInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_recreate_instances_request_resource is not None: + request.instance_group_managers_recreate_instances_request_resource = instance_group_managers_recreate_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.recreate_instances] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def resize_unary(self, + request: Optional[Union[compute.ResizeInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + size: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Resizes the managed instance group. If you increase + the size, the group creates new instances using the + current instance template. 
If you decrease the size, the + group deletes instances. The resize operation is marked + DONE when the resize actions are scheduled even if the + group has not yet added or deleted any instances. You + must separately verify the status of the creating or + deleting actions with the listmanagedinstances method. + When resizing down, the instance group arbitrarily + chooses the order in which VMs are deleted. The group + takes into account some VM attributes when making the + selection including: + The status of the VM instance. + + The health of the VM instance. + The instance template + version the VM is based on. + For regional managed + instance groups, the location of the VM instance. This + list is subject to change. If the group is part of a + backend service that has enabled connection draining, it + can take up to 60 seconds after the connection draining + duration has elapsed before the VM instance is removed + or deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ResizeInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + size=443, + zone="zone_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeInstanceGroupManagerRequest, dict]): + The request object. 
A request message for + InstanceGroupManagers.Resize. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + size (int): + The number of running instances that + the managed instance group should + maintain at any given time. The group + automatically adds or removes instances + to maintain the number of instances + specified by this parameter. + + This corresponds to the ``size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, instance_group_manager, size]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeInstanceGroupManagerRequest): + request = compute.ResizeInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if size is not None: + request.size = size + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def resize(self, + request: Optional[Union[compute.ResizeInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + size: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Resizes the managed instance group. If you increase + the size, the group creates new instances using the + current instance template. If you decrease the size, the + group deletes instances. The resize operation is marked + DONE when the resize actions are scheduled even if the + group has not yet added or deleted any instances. You + must separately verify the status of the creating or + deleting actions with the listmanagedinstances method. + When resizing down, the instance group arbitrarily + chooses the order in which VMs are deleted. The group + takes into account some VM attributes when making the + selection including: + The status of the VM instance. + + The health of the VM instance. + The instance template + version the VM is based on. + For regional managed + instance groups, the location of the VM instance. This + list is subject to change. If the group is part of a + backend service that has enabled connection draining, it + can take up to 60 seconds after the connection draining + duration has elapsed before the VM instance is removed + or deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ResizeInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + size=443, + zone="zone_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.Resize. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + size (int): + The number of running instances that + the managed instance group should + maintain at any given time. The group + automatically adds or removes instances + to maintain the number of instances + specified by this parameter. + + This corresponds to the ``size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, size]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeInstanceGroupManagerRequest): + request = compute.ResizeInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if size is not None: + request.size = size + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_instance_template_unary(self, + request: Optional[Union[compute.SetInstanceTemplateInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_set_instance_template_request_resource: Optional[compute.InstanceGroupManagersSetInstanceTemplateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Specifies the instance template to use when creating + new instances in this group. 
The templates for existing + instances in the group do not change unless you run + recreateInstances, run applyUpdatesToInstances, or set + the group's updatePolicy.type to PROACTIVE. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_instance_template(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetInstanceTemplateInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_instance_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetInstanceTemplateInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.SetInstanceTemplate. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance_group_managers_set_instance_template_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersSetInstanceTemplateRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_set_instance_template_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_set_instance_template_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetInstanceTemplateInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetInstanceTemplateInstanceGroupManagerRequest): + request = compute.SetInstanceTemplateInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_set_instance_template_request_resource is not None: + request.instance_group_managers_set_instance_template_request_resource = instance_group_managers_set_instance_template_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_instance_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_instance_template(self, + request: Optional[Union[compute.SetInstanceTemplateInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_set_instance_template_request_resource: Optional[compute.InstanceGroupManagersSetInstanceTemplateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Specifies the instance template to use when creating + new instances in this group. The templates for existing + instances in the group do not change unless you run + recreateInstances, run applyUpdatesToInstances, or set + the group's updatePolicy.type to PROACTIVE. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_instance_template(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetInstanceTemplateInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_instance_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetInstanceTemplateInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.SetInstanceTemplate. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance_group_managers_set_instance_template_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersSetInstanceTemplateRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_set_instance_template_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_set_instance_template_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetInstanceTemplateInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetInstanceTemplateInstanceGroupManagerRequest): + request = compute.SetInstanceTemplateInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_set_instance_template_request_resource is not None: + request.instance_group_managers_set_instance_template_request_resource = instance_group_managers_set_instance_template_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_instance_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_target_pools_unary(self, + request: Optional[Union[compute.SetTargetPoolsInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_set_target_pools_request_resource: Optional[compute.InstanceGroupManagersSetTargetPoolsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Modifies the target pools to which all instances in + this managed instance group are assigned. The target + pools automatically apply to all of the instances in the + managed instance group. This operation is marked DONE + when you make the request even if the instances have not + yet been added to their target pools. The change might + take some time to apply to all of the instances in the + group depending on the size of the group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_target_pools(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetPoolsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_target_pools(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTargetPoolsInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.SetTargetPools. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_set_target_pools_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersSetTargetPoolsRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_set_target_pools_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_set_target_pools_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetTargetPoolsInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetTargetPoolsInstanceGroupManagerRequest): + request = compute.SetTargetPoolsInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_set_target_pools_request_resource is not None: + request.instance_group_managers_set_target_pools_request_resource = instance_group_managers_set_target_pools_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_target_pools] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_target_pools(self, + request: Optional[Union[compute.SetTargetPoolsInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_set_target_pools_request_resource: Optional[compute.InstanceGroupManagersSetTargetPoolsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Modifies the target pools to which all instances in + this managed instance group are assigned. The target + pools automatically apply to all of the instances in the + managed instance group. This operation is marked DONE + when you make the request even if the instances have not + yet been added to their target pools. The change might + take some time to apply to all of the instances in the + group depending on the size of the group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_target_pools(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetPoolsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_target_pools(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTargetPoolsInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.SetTargetPools. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_managers_set_target_pools_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersSetTargetPoolsRequest): + The body resource for this request + This corresponds to the ``instance_group_managers_set_target_pools_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_set_target_pools_request_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.SetTargetPoolsInstanceGroupManagerRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.SetTargetPoolsInstanceGroupManagerRequest):
+            request = compute.SetTargetPoolsInstanceGroupManagerRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if zone is not None:
+            request.zone = zone
+        if instance_group_manager is not None:
+            request.instance_group_manager = instance_group_manager
+        if instance_group_managers_set_target_pools_request_resource is not None:
+            request.instance_group_managers_set_target_pools_request_resource = instance_group_managers_set_target_pools_request_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.set_target_pools]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def update_per_instance_configs_unary(self, + request: Optional[Union[compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_update_per_instance_configs_req_resource: Optional[compute.InstanceGroupManagersUpdatePerInstanceConfigsReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Inserts or updates per-instance configurations for + the managed instance group. perInstanceConfig.name + serves as a key used to distinguish whether to perform + insert or patch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.UpdatePerInstanceConfigsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdatePerInstanceConfigsInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.UpdatePerInstanceConfigs. + See the method description for details. 
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone where the
+                managed instance group is located. It
+                should conform to RFC1035.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_group_manager (str):
+                The name of the managed instance
+                group. It should conform to RFC1035.
+
+                This corresponds to the ``instance_group_manager`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_group_managers_update_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersUpdatePerInstanceConfigsReq):
+                The body resource for this request
+                This corresponds to the ``instance_group_managers_update_per_instance_configs_req_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                An object representing a
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_update_per_instance_configs_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest): + request = compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_update_per_instance_configs_req_resource is not None: + request.instance_group_managers_update_per_instance_configs_req_resource = instance_group_managers_update_per_instance_configs_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_per_instance_configs(self, + request: Optional[Union[compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_managers_update_per_instance_configs_req_resource: Optional[compute.InstanceGroupManagersUpdatePerInstanceConfigsReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Inserts or updates per-instance configurations for + the managed instance group. perInstanceConfig.name + serves as a key used to distinguish whether to perform + insert or patch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.UpdatePerInstanceConfigsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdatePerInstanceConfigsInstanceGroupManagerRequest, dict]): + The request object. A request message for + InstanceGroupManagers.UpdatePerInstanceConfigs. + See the method description for details. 
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                The name of the zone where the
+                managed instance group is located. It
+                should conform to RFC1035.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_group_manager (str):
+                The name of the managed instance
+                group. It should conform to RFC1035.
+
+                This corresponds to the ``instance_group_manager`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_group_managers_update_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersUpdatePerInstanceConfigsReq):
+                The body resource for this request
+                This corresponds to the ``instance_group_managers_update_per_instance_configs_req_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance_group_manager, instance_group_managers_update_per_instance_configs_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest): + request = compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_managers_update_per_instance_configs_req_resource is not None: + request.instance_group_managers_update_per_instance_configs_req_resource = instance_group_managers_update_per_instance_configs_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "InstanceGroupManagersClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "InstanceGroupManagersClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/pagers.py new file mode 100644 index 000000000..e20d1464b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/pagers.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupManagerAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupManagerAggregatedList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceGroupManagerAggregatedList], + request: compute.AggregatedListInstanceGroupManagersRequest, + response: compute.InstanceGroupManagerAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupManagerAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupManagerAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.InstanceGroupManagersScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.InstanceGroupManagersScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupManagerList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupManagerList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceGroupManagerList], + request: compute.ListInstanceGroupManagersRequest, + response: compute.InstanceGroupManagerList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupManagerList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupManagerList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceGroupManager]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListErrorsPager: + """A pager for iterating through ``list_errors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupManagersListErrorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListErrors`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupManagersListErrorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceGroupManagersListErrorsResponse], + request: compute.ListErrorsInstanceGroupManagersRequest, + response: compute.InstanceGroupManagersListErrorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListErrorsInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupManagersListErrorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListErrorsInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupManagersListErrorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceManagedByIgmError]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListManagedInstancesPager: + """A pager for iterating through ``list_managed_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupManagersListManagedInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``managed_instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListManagedInstances`` requests and continue to iterate + through the ``managed_instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupManagersListManagedInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.InstanceGroupManagersListManagedInstancesResponse], + request: compute.ListManagedInstancesInstanceGroupManagersRequest, + response: compute.InstanceGroupManagersListManagedInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListManagedInstancesInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupManagersListManagedInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListManagedInstancesInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupManagersListManagedInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.ManagedInstance]: + for page in self.pages: + yield from page.managed_instances + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPerInstanceConfigsPager: + """A pager for iterating through ``list_per_instance_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupManagersListPerInstanceConfigsResp` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListPerInstanceConfigs`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupManagersListPerInstanceConfigsResp` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceGroupManagersListPerInstanceConfigsResp], + request: compute.ListPerInstanceConfigsInstanceGroupManagersRequest, + response: compute.InstanceGroupManagersListPerInstanceConfigsResp, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListPerInstanceConfigsInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupManagersListPerInstanceConfigsResp): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListPerInstanceConfigsInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupManagersListPerInstanceConfigsResp]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.PerInstanceConfig]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/__init__.py new file mode 100644 index 000000000..3068d03f1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstanceGroupManagersTransport +from .rest import InstanceGroupManagersRestTransport +from .rest import InstanceGroupManagersRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceGroupManagersTransport]] +_transport_registry['rest'] = InstanceGroupManagersRestTransport + +__all__ = ( + 'InstanceGroupManagersTransport', + 'InstanceGroupManagersRestTransport', + 'InstanceGroupManagersRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/base.py new file mode 100644 index 000000000..9edd465b5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/base.py @@ -0,0 +1,429 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import zone_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class InstanceGroupManagersTransport(abc.ABC): + """Abstract transport class for InstanceGroupManagers.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.abandon_instances: gapic_v1.method.wrap_method( + self.abandon_instances, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.apply_updates_to_instances: gapic_v1.method.wrap_method( + self.apply_updates_to_instances, + default_timeout=None, + client_info=client_info, + ), + self.create_instances: gapic_v1.method.wrap_method( + self.create_instances, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.delete_instances: gapic_v1.method.wrap_method( + self.delete_instances, + default_timeout=None, + client_info=client_info, + ), + self.delete_per_instance_configs: gapic_v1.method.wrap_method( + self.delete_per_instance_configs, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_errors: gapic_v1.method.wrap_method( + self.list_errors, + default_timeout=None, + client_info=client_info, + ), + self.list_managed_instances: gapic_v1.method.wrap_method( + self.list_managed_instances, + default_timeout=None, + client_info=client_info, + ), + self.list_per_instance_configs: gapic_v1.method.wrap_method( + self.list_per_instance_configs, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + 
client_info=client_info, + ), + self.patch_per_instance_configs: gapic_v1.method.wrap_method( + self.patch_per_instance_configs, + default_timeout=None, + client_info=client_info, + ), + self.recreate_instances: gapic_v1.method.wrap_method( + self.recreate_instances, + default_timeout=None, + client_info=client_info, + ), + self.resize: gapic_v1.method.wrap_method( + self.resize, + default_timeout=None, + client_info=client_info, + ), + self.set_instance_template: gapic_v1.method.wrap_method( + self.set_instance_template, + default_timeout=None, + client_info=client_info, + ), + self.set_target_pools: gapic_v1.method.wrap_method( + self.set_target_pools, + default_timeout=None, + client_info=client_info, + ), + self.update_per_instance_configs: gapic_v1.method.wrap_method( + self.update_per_instance_configs, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def abandon_instances(self) -> Callable[ + [compute.AbandonInstancesInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInstanceGroupManagersRequest], + Union[ + compute.InstanceGroupManagerAggregatedList, + Awaitable[compute.InstanceGroupManagerAggregatedList] + ]]: + raise NotImplementedError() + + @property + def apply_updates_to_instances(self) -> Callable[ + [compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def create_instances(self) -> Callable[ + [compute.CreateInstancesInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instances(self) -> Callable[ + [compute.DeleteInstancesInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_per_instance_configs(self) -> Callable[ + [compute.DeletePerInstanceConfigsInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetInstanceGroupManagerRequest], + Union[ + compute.InstanceGroupManager, + Awaitable[compute.InstanceGroupManager] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> 
Callable[ + [compute.ListInstanceGroupManagersRequest], + Union[ + compute.InstanceGroupManagerList, + Awaitable[compute.InstanceGroupManagerList] + ]]: + raise NotImplementedError() + + @property + def list_errors(self) -> Callable[ + [compute.ListErrorsInstanceGroupManagersRequest], + Union[ + compute.InstanceGroupManagersListErrorsResponse, + Awaitable[compute.InstanceGroupManagersListErrorsResponse] + ]]: + raise NotImplementedError() + + @property + def list_managed_instances(self) -> Callable[ + [compute.ListManagedInstancesInstanceGroupManagersRequest], + Union[ + compute.InstanceGroupManagersListManagedInstancesResponse, + Awaitable[compute.InstanceGroupManagersListManagedInstancesResponse] + ]]: + raise NotImplementedError() + + @property + def list_per_instance_configs(self) -> Callable[ + [compute.ListPerInstanceConfigsInstanceGroupManagersRequest], + Union[ + compute.InstanceGroupManagersListPerInstanceConfigsResp, + Awaitable[compute.InstanceGroupManagersListPerInstanceConfigsResp] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def patch_per_instance_configs(self) -> Callable[ + [compute.PatchPerInstanceConfigsInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def recreate_instances(self) -> Callable[ + [compute.RecreateInstancesInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def resize(self) -> Callable[ + [compute.ResizeInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_instance_template(self) -> Callable[ + [compute.SetInstanceTemplateInstanceGroupManagerRequest], + Union[ + 
compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_target_pools(self) -> Callable[ + [compute.SetTargetPoolsInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update_per_instance_configs(self) -> Callable[ + [compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient: + ex_op_service = self._extended_operations_services.get("zone_operations") + if not ex_op_service: + ex_op_service = zone_operations.ZoneOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["zone_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'InstanceGroupManagersTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py new file mode 100644 index 000000000..df61d5adf --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py @@ -0,0 +1,2718 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import InstanceGroupManagersTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class InstanceGroupManagersRestInterceptor: + """Interceptor for InstanceGroupManagers. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InstanceGroupManagersRestTransport. + + .. 
code-block:: python + class MyCustomInstanceGroupManagersInterceptor(InstanceGroupManagersRestInterceptor): + def pre_abandon_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_abandon_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_apply_updates_to_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_apply_updates_to_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_per_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_per_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_errors(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_errors(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_managed_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_managed_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_per_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_per_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch_per_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch_per_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_recreate_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_recreate_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resize(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_resize(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_instance_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_instance_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_target_pools(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_target_pools(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_per_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_per_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InstanceGroupManagersRestTransport(interceptor=MyCustomInstanceGroupManagersInterceptor()) + client = InstanceGroupManagersClient(transport=transport) + + + """ + def pre_abandon_instances(self, request: compute.AbandonInstancesInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AbandonInstancesInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for abandon_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_abandon_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for abandon_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. 
+ """ + return response + def pre_aggregated_list(self, request: compute.AggregatedListInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.InstanceGroupManagerAggregatedList) -> compute.InstanceGroupManagerAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_apply_updates_to_instances(self, request: compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for apply_updates_to_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_apply_updates_to_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for apply_updates_to_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_create_instances(self, request: compute.CreateInstancesInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.CreateInstancesInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. 
+ """ + return request, metadata + + def post_create_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for create_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_delete_instances(self, request: compute.DeleteInstancesInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteInstancesInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_delete_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. 
+ """ + return response + def pre_delete_per_instance_configs(self, request: compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_per_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_delete_per_instance_configs(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete_per_instance_configs + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_get(self, response: compute.InstanceGroupManager) -> compute.InstanceGroupManager: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_list(self, response: compute.InstanceGroupManagerList) -> compute.InstanceGroupManagerList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_list_errors(self, request: compute.ListErrorsInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListErrorsInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_errors + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_list_errors(self, response: compute.InstanceGroupManagersListErrorsResponse) -> compute.InstanceGroupManagersListErrorsResponse: + """Post-rpc interceptor for list_errors + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. 
+ """ + return response + def pre_list_managed_instances(self, request: compute.ListManagedInstancesInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListManagedInstancesInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_managed_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_list_managed_instances(self, response: compute.InstanceGroupManagersListManagedInstancesResponse) -> compute.InstanceGroupManagersListManagedInstancesResponse: + """Post-rpc interceptor for list_managed_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_list_per_instance_configs(self, request: compute.ListPerInstanceConfigsInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListPerInstanceConfigsInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_per_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_list_per_instance_configs(self, response: compute.InstanceGroupManagersListPerInstanceConfigsResp) -> compute.InstanceGroupManagersListPerInstanceConfigsResp: + """Post-rpc interceptor for list_per_instance_configs + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_patch_per_instance_configs(self, request: compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch_per_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_patch_per_instance_configs(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch_per_instance_configs + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_recreate_instances(self, request: compute.RecreateInstancesInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RecreateInstancesInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for recreate_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. 
+ """ + return request, metadata + + def post_recreate_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for recreate_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_resize(self, request: compute.ResizeInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ResizeInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resize + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_resize(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for resize + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_set_instance_template(self, request: compute.SetInstanceTemplateInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetInstanceTemplateInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_instance_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_set_instance_template(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_instance_template + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. 
+ """ + return response + def pre_set_target_pools(self, request: compute.SetTargetPoolsInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetTargetPoolsInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_target_pools + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_set_target_pools(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_target_pools + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_update_per_instance_configs(self, request: compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_per_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagers server. + """ + return request, metadata + + def post_update_per_instance_configs(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_per_instance_configs + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagers server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InstanceGroupManagersRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstanceGroupManagersRestInterceptor + + +class InstanceGroupManagersRestTransport(InstanceGroupManagersTransport): + """REST backend transport for InstanceGroupManagers. + + The InstanceGroupManagers API. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InstanceGroupManagersRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InstanceGroupManagersRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AbandonInstances(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("AbandonInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in
message_dict} + + def __call__(self, + request: compute.AbandonInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the abandon instances method over HTTP. + + Args: + request (~.compute.AbandonInstancesInstanceGroupManagerRequest): + The request object. Messages + + A request message for + InstanceGroupManagers.AbandonInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/abandonInstances', + 'body': 'instance_group_managers_abandon_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_abandon_instances(request, metadata) + pb_request = compute.AbandonInstancesInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_abandon_instances(resp) + return resp + + class _AggregatedList(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupManagerAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListInstanceGroupManagersRequest): + The request object. A request message for + InstanceGroupManagers.AggregatedList. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstanceGroupManagerAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/instanceGroupManagers', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManagerAggregatedList() + pb_resp = compute.InstanceGroupManagerAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _ApplyUpdatesToInstances(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("ApplyUpdatesToInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the apply updates to + instances method over HTTP. + + Args: + request (~.compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.ApplyUpdatesToInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource.
- For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances', + 'body': 'instance_group_managers_apply_updates_request_resource', + }, + ] + request, metadata = self._interceptor.pre_apply_updates_to_instances(request, metadata) + pb_request = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_apply_updates_to_instances(resp) + return resp + + class _CreateInstances(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("CreateInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.CreateInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the create instances method over HTTP. + + Args: + request (~.compute.CreateInstancesInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.CreateInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource.
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/createInstances', + 'body': 'instance_group_managers_create_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_create_instances(request, metadata) + pb_request = compute.CreateInstancesInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instances(resp) + return resp + + class _Delete(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _DeleteInstances(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("DeleteInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete instances method over HTTP. + + Args: + request (~.compute.DeleteInstancesInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.DeleteInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource.
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deleteInstances', + 'body': 'instance_group_managers_delete_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_delete_instances(request, metadata) + pb_request = compute.DeleteInstancesInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instances(resp) + return resp + + class _DeletePerInstanceConfigs(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("DeletePerInstanceConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete per instance + configs method over HTTP. + + Args: + request (~.compute.DeletePerInstanceConfigsInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.DeletePerInstanceConfigs. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource.
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs', + 'body': 'instance_group_managers_delete_per_instance_configs_req_resource', + }, + ] + request, metadata = self._interceptor.pre_delete_per_instance_configs(request, metadata) + pb_request = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_per_instance_configs(resp) + return resp + + class _Get(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupManager: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceGroupManager: + Represents a Managed Instance Group + resource. An instance group is a + collection of VM instances that you can + manage as a single entity. For more + information, read Instance groups. For + zonal Managed Instance Group, use the + instanceGroupManagers resource. For + regional Managed Instance Group, use the + regionInstanceGroupManagers resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManager() + pb_resp = compute.InstanceGroupManager.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers', + 'body': 'instance_group_manager_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupManagerList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInstanceGroupManagersRequest): + The request object. A request message for + InstanceGroupManagers.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceGroupManagerList: + [Output Only] A list of managed instance groups. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManagerList() + pb_resp = compute.InstanceGroupManagerList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListErrors(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("ListErrors") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListErrorsInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupManagersListErrorsResponse: + r"""Call the list errors method over HTTP. + + Args: + request (~.compute.ListErrorsInstanceGroupManagersRequest): + The request object. A request message for + InstanceGroupManagers.ListErrors. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstanceGroupManagersListErrorsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listErrors', + }, + ] + request, metadata = self._interceptor.pre_list_errors(request, metadata) + pb_request = compute.ListErrorsInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManagersListErrorsResponse() + pb_resp = compute.InstanceGroupManagersListErrorsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_errors(resp) + return resp + + class _ListManagedInstances(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("ListManagedInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListManagedInstancesInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupManagersListManagedInstancesResponse: + r"""Call the list managed instances method over HTTP. + + Args: + request (~.compute.ListManagedInstancesInstanceGroupManagersRequest): + The request object. A request message for + InstanceGroupManagers.ListManagedInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstanceGroupManagersListManagedInstancesResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listManagedInstances', + }, + ] + request, metadata = self._interceptor.pre_list_managed_instances(request, metadata) + pb_request = compute.ListManagedInstancesInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManagersListManagedInstancesResponse() + pb_resp = compute.InstanceGroupManagersListManagedInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_managed_instances(resp) + return resp + + class _ListPerInstanceConfigs(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("ListPerInstanceConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListPerInstanceConfigsInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupManagersListPerInstanceConfigsResp: + r"""Call the list per instance configs method over HTTP. + + Args: + request (~.compute.ListPerInstanceConfigsInstanceGroupManagersRequest): + The request object. A request message for + InstanceGroupManagers.ListPerInstanceConfigs. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstanceGroupManagersListPerInstanceConfigsResp: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs', + }, + ] + request, metadata = self._interceptor.pre_list_per_instance_configs(request, metadata) + pb_request = compute.ListPerInstanceConfigsInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManagersListPerInstanceConfigsResp() + pb_resp = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_per_instance_configs(resp) + return resp + + class _Patch(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}', + 'body': 'instance_group_manager_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _PatchPerInstanceConfigs(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("PatchPerInstanceConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch per instance + configs method over HTTP. + + Args: + request (~.compute.PatchPerInstanceConfigsInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.PatchPerInstanceConfigs. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs', + 'body': 'instance_group_managers_patch_per_instance_configs_req_resource', + }, + ] + request, metadata = self._interceptor.pre_patch_per_instance_configs(request, metadata) + pb_request = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch_per_instance_configs(resp) + return resp + + class _RecreateInstances(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("RecreateInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RecreateInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the recreate instances method over HTTP. + + Args: + request (~.compute.RecreateInstancesInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.RecreateInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/recreateInstances', + 'body': 'instance_group_managers_recreate_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_recreate_instances(request, metadata) + pb_request = compute.RecreateInstancesInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_recreate_instances(resp) + return resp + + class _Resize(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("Resize") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "size" : 0, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ResizeInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the resize method over HTTP. + + Args: + request (~.compute.ResizeInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.Resize. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resize', + }, + ] + request, metadata = self._interceptor.pre_resize(request, metadata) + pb_request = compute.ResizeInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resize(resp) + return resp + + class _SetInstanceTemplate(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("SetInstanceTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetInstanceTemplateInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set instance template method over HTTP. + + Args: + request (~.compute.SetInstanceTemplateInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.SetInstanceTemplate. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate', + 'body': 'instance_group_managers_set_instance_template_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_instance_template(request, metadata) + pb_request = compute.SetInstanceTemplateInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_instance_template(resp) + return resp + + class _SetTargetPools(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("SetTargetPools") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetTargetPoolsInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set target pools method over HTTP. + + Args: + request (~.compute.SetTargetPoolsInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.SetTargetPools. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setTargetPools', + 'body': 'instance_group_managers_set_target_pools_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_target_pools(request, metadata) + pb_request = compute.SetTargetPoolsInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_target_pools(resp) + return resp + + class _UpdatePerInstanceConfigs(InstanceGroupManagersRestStub): + def __hash__(self): + return hash("UpdatePerInstanceConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update per instance + configs method over HTTP. + + Args: + request (~.compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest): + The request object. A request message for + InstanceGroupManagers.UpdatePerInstanceConfigs. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs', + 'body': 'instance_group_managers_update_per_instance_configs_req_resource', + }, + ] + request, metadata = self._interceptor.pre_update_per_instance_configs(request, metadata) + pb_request = compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_per_instance_configs(resp) + return resp + + @property + def abandon_instances(self) -> Callable[ + [compute.AbandonInstancesInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AbandonInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInstanceGroupManagersRequest], + compute.InstanceGroupManagerAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def apply_updates_to_instances(self) -> Callable[ + [compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ApplyUpdatesToInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instances(self) -> Callable[ + [compute.CreateInstancesInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instances(self) -> Callable[ + [compute.DeleteInstancesInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_per_instance_configs(self) -> Callable[ + [compute.DeletePerInstanceConfigsInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePerInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetInstanceGroupManagerRequest], + compute.InstanceGroupManager]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListInstanceGroupManagersRequest], + compute.InstanceGroupManagerList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_errors(self) -> Callable[ + [compute.ListErrorsInstanceGroupManagersRequest], + compute.InstanceGroupManagersListErrorsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListErrors(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_managed_instances(self) -> Callable[ + [compute.ListManagedInstancesInstanceGroupManagersRequest], + compute.InstanceGroupManagersListManagedInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListManagedInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_per_instance_configs(self) -> Callable[ + [compute.ListPerInstanceConfigsInstanceGroupManagersRequest], + compute.InstanceGroupManagersListPerInstanceConfigsResp]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPerInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch_per_instance_configs(self) -> Callable[ + [compute.PatchPerInstanceConfigsInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PatchPerInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def recreate_instances(self) -> Callable[ + [compute.RecreateInstancesInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RecreateInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def resize(self) -> Callable[ + [compute.ResizeInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Resize(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_instance_template(self) -> Callable[ + [compute.SetInstanceTemplateInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetInstanceTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_target_pools(self) -> Callable[ + [compute.SetTargetPoolsInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetTargetPools(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_per_instance_configs(self) -> Callable[ + [compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdatePerInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InstanceGroupManagersRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/__init__.py new file mode 100644 index 000000000..5a0e742d8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import InstanceGroupsClient + +__all__ = ( + 'InstanceGroupsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/client.py new file mode 100644 index 000000000..1702cfa57 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/client.py @@ -0,0 +1,2349 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.instance_groups import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import InstanceGroupsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import InstanceGroupsRestTransport + + +class InstanceGroupsClientMeta(type): + """Metaclass for the InstanceGroups client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceGroupsTransport]] + _transport_registry["rest"] = InstanceGroupsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[InstanceGroupsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InstanceGroupsClient(metaclass=InstanceGroupsClientMeta): + """The InstanceGroups API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceGroupsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstanceGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + InstanceGroupsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified 
organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InstanceGroupsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, InstanceGroupsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, InstanceGroupsTransport): + # transport is a InstanceGroupsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def add_instances_unary(self, + request: Optional[Union[compute.AddInstancesInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + instance_groups_add_instances_request_resource: Optional[compute.InstanceGroupsAddInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Adds a list of instances to the specified instance + group. All of the instances in the instance group must + be in the same network/subnetwork. Read Adding instances + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_instances(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AddInstancesInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddInstancesInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.AddInstances. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group where + you are adding instances. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_groups_add_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsAddInstancesRequest): + The body resource for this request + This corresponds to the ``instance_groups_add_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group, instance_groups_add_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddInstancesInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddInstancesInstanceGroupRequest): + request = compute.AddInstancesInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group is not None: + request.instance_group = instance_group + if instance_groups_add_instances_request_resource is not None: + request.instance_groups_add_instances_request_resource = instance_groups_add_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def add_instances(self, + request: Optional[Union[compute.AddInstancesInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + instance_groups_add_instances_request_resource: Optional[compute.InstanceGroupsAddInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Adds a list of instances to the specified instance + group. All of the instances in the instance group must + be in the same network/subnetwork. Read Adding instances + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_instances(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AddInstancesInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddInstancesInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.AddInstances. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group where + you are adding instances. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_groups_add_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsAddInstancesRequest): + The body resource for this request + This corresponds to the ``instance_groups_add_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group, instance_groups_add_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddInstancesInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.AddInstancesInstanceGroupRequest): + request = compute.AddInstancesInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group is not None: + request.instance_group = instance_group + if instance_groups_add_instances_request_resource is not None: + request.instance_groups_add_instances_request_resource = instance_groups_add_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListInstanceGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of instance groups and sorts them + by zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInstanceGroupsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListInstanceGroupsRequest, dict]): + The request object. A request message for + InstanceGroups.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instance_groups.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListInstanceGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListInstanceGroupsRequest): + request = compute.AggregatedListInstanceGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified instance group. The instances + in the group are not deleted. Note that instance group + must not belong to a backend service. Read Deleting an + instance group for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.Delete. 
See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group to + delete. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInstanceGroupRequest): + request = compute.DeleteInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group is not None: + request.instance_group = instance_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified instance group. The instances + in the group are not deleted. Note that instance group + must not belong to a backend service. Read Deleting an + instance group for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group to + delete. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, instance_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInstanceGroupRequest): + request = compute.DeleteInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group is not None: + request.instance_group = instance_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstanceGroup: + r"""Returns the specified zonal instance group. Get a + list of available zonal instance groups by making a + list() request. For managed instance groups, use the + instanceGroupManagers or regionInstanceGroupManagers + methods instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group. + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InstanceGroup: + Represents an Instance Group + resource. Instance Groups can be used to + configure a target for load balancing. + Instance groups can either be managed or + unmanaged. To create managed instance + groups, use the instanceGroupManager or + regionInstanceGroupManager resource + instead. 
Use zonal unmanaged instance + groups if you need to apply load + balancing to groups of heterogeneous + instances or if you need to manage the + instances yourself. You cannot create + regional unmanaged instance groups. For + more information, read Instance groups. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetInstanceGroupRequest): + request = compute.GetInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group is not None: + request.instance_group = instance_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_resource: Optional[compute.InstanceGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an instance group in the specified project + using the parameters that are included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceGroupRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where you want + to create the instance group. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance_group_resource (google.cloud.compute_v1.types.InstanceGroup): + The body resource for this request + This corresponds to the ``instance_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInstanceGroupRequest): + request = compute.InsertInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_resource is not None: + request.instance_group_resource = instance_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_resource: Optional[compute.InstanceGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an instance group in the specified project + using the parameters that are included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceGroupRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where you want + to create the instance group. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_resource (google.cloud.compute_v1.types.InstanceGroup): + The body resource for this request + This corresponds to the ``instance_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInstanceGroupRequest): + request = compute.InsertInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_resource is not None: + request.instance_group_resource = instance_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListInstanceGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of zonal instance group resources + contained within the specified zone. For managed + instance groups, use the instanceGroupManagers or + regionInstanceGroupManagers methods instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstanceGroupsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInstanceGroupsRequest, dict]): + The request object. A request message for + InstanceGroups.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instance_groups.pagers.ListPager: + A list of InstanceGroup resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInstanceGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInstanceGroupsRequest): + request = compute.ListInstanceGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_instances(self, + request: Optional[Union[compute.ListInstancesInstanceGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + instance_groups_list_instances_request_resource: Optional[compute.InstanceGroupsListInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists the instances in the specified instance group. The orderBy + query parameter is not supported. The filter query parameter is + supported, but only for expressions that use ``eq`` (equal) or + ``ne`` (not equal) operators. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_instances(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstancesInstanceGroupsRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInstancesInstanceGroupsRequest, dict]): + The request object. A request message for + InstanceGroups.ListInstances. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group from + which you want to generate a list of + included instances. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest): + The body resource for this request + This corresponds to the ``instance_groups_list_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instance_groups.pagers.ListInstancesPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group, instance_groups_list_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInstancesInstanceGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInstancesInstanceGroupsRequest): + request = compute.ListInstancesInstanceGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group is not None: + request.instance_group = instance_group + if instance_groups_list_instances_request_resource is not None: + request.instance_groups_list_instances_request_resource = instance_groups_list_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_instances_unary(self, + request: Optional[Union[compute.RemoveInstancesInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + instance_groups_remove_instances_request_resource: Optional[compute.InstanceGroupsRemoveInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes one or more instances from the specified + instance group, but does not delete those instances. If + the group is part of a backend service that has enabled + connection draining, it can take up to 60 seconds after + the connection draining duration before the VM instance + is removed or deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_instances(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveInstancesInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveInstancesInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.RemoveInstances. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group where + the specified instances will be removed. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_groups_remove_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsRemoveInstancesRequest): + The body resource for this request + This corresponds to the ``instance_groups_remove_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An Operation resource, describing
+ the status of the request.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance_group, instance_groups_remove_instances_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.RemoveInstancesInstanceGroupRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.RemoveInstancesInstanceGroupRequest):
+ request = compute.RemoveInstancesInstanceGroupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if zone is not None:
+ request.zone = zone
+ if instance_group is not None:
+ request.instance_group = instance_group
+ if instance_groups_remove_instances_request_resource is not None:
+ request.instance_groups_remove_instances_request_resource = instance_groups_remove_instances_request_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.remove_instances]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("zone", request.zone),
+ ("instance_group", request.instance_group),
+ )),
+ )
+
+ # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_instances(self, + request: Optional[Union[compute.RemoveInstancesInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + instance_groups_remove_instances_request_resource: Optional[compute.InstanceGroupsRemoveInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes one or more instances from the specified + instance group, but does not delete those instances. If + the group is part of a backend service that has enabled + connection draining, it can take up to 60 seconds after + the connection draining duration before the VM instance + is removed or deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_instances(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveInstancesInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveInstancesInstanceGroupRequest, dict]): + The request object. 
A request message for + InstanceGroups.RemoveInstances. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group where + the specified instances will be removed. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_groups_remove_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsRemoveInstancesRequest): + The body resource for this request + This corresponds to the ``instance_groups_remove_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, instance_group, instance_groups_remove_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveInstancesInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveInstancesInstanceGroupRequest): + request = compute.RemoveInstancesInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group is not None: + request.instance_group = instance_group + if instance_groups_remove_instances_request_resource is not None: + request.instance_groups_remove_instances_request_resource = instance_groups_remove_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_named_ports_unary(self, + request: Optional[Union[compute.SetNamedPortsInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + instance_groups_set_named_ports_request_resource: Optional[compute.InstanceGroupsSetNamedPortsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the named ports for the specified instance + group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_named_ports(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetNamedPortsInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_named_ports(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetNamedPortsInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.SetNamedPorts. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group where + the named ports are updated. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_groups_set_named_ports_request_resource (google.cloud.compute_v1.types.InstanceGroupsSetNamedPortsRequest): + The body resource for this request + This corresponds to the ``instance_groups_set_named_ports_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An Operation resource, describing
+ the status of the request.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance_group, instance_groups_set_named_ports_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetNamedPortsInstanceGroupRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetNamedPortsInstanceGroupRequest):
+ request = compute.SetNamedPortsInstanceGroupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if zone is not None:
+ request.zone = zone
+ if instance_group is not None:
+ request.instance_group = instance_group
+ if instance_groups_set_named_ports_request_resource is not None:
+ request.instance_groups_set_named_ports_request_resource = instance_groups_set_named_ports_request_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_named_ports]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("zone", request.zone),
+ ("instance_group", request.instance_group),
+ )),
+ )
+
+ # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_named_ports(self, + request: Optional[Union[compute.SetNamedPortsInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group: Optional[str] = None, + instance_groups_set_named_ports_request_resource: Optional[compute.InstanceGroupsSetNamedPortsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the named ports for the specified instance + group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_named_ports(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetNamedPortsInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_named_ports(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetNamedPortsInstanceGroupRequest, dict]): + The request object. A request message for + InstanceGroups.SetNamedPorts. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + The name of the zone where the + instance group is located. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + The name of the instance group where + the named ports are updated. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_groups_set_named_ports_request_resource (google.cloud.compute_v1.types.InstanceGroupsSetNamedPortsRequest): + The body resource for this request + This corresponds to the ``instance_groups_set_named_ports_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group, instance_groups_set_named_ports_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetNamedPortsInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetNamedPortsInstanceGroupRequest): + request = compute.SetNamedPortsInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group is not None: + request.instance_group = instance_group + if instance_groups_set_named_ports_request_resource is not None: + request.instance_groups_set_named_ports_request_resource = instance_groups_set_named_ports_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_named_ports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "InstanceGroupsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "InstanceGroupsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/pagers.py new file mode 100644 index 000000000..fc085e2a8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/pagers.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceGroupAggregatedList], + request: compute.AggregatedListInstanceGroupsRequest, + response: compute.InstanceGroupAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListInstanceGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListInstanceGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.InstanceGroupsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.InstanceGroupsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceGroupList], + request: compute.ListInstanceGroupsRequest, + response: compute.InstanceGroupList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListInstanceGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListInstanceGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceGroup]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupsListInstances` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupsListInstances` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.InstanceGroupsListInstances], + request: compute.ListInstancesInstanceGroupsRequest, + response: compute.InstanceGroupsListInstances, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListInstancesInstanceGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupsListInstances): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListInstancesInstanceGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupsListInstances]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceWithNamedPorts]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/__init__.py new file mode 100644 index 000000000..6da400d8c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstanceGroupsTransport +from .rest import InstanceGroupsRestTransport +from .rest import InstanceGroupsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceGroupsTransport]] +_transport_registry['rest'] = InstanceGroupsRestTransport + +__all__ = ( + 'InstanceGroupsTransport', + 'InstanceGroupsRestTransport', + 'InstanceGroupsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/base.py new file mode 100644 index 000000000..bf1666837 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import zone_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class InstanceGroupsTransport(abc.ABC): + """Abstract transport class for InstanceGroups.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_instances: gapic_v1.method.wrap_method( + self.add_instances, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_timeout=None, + client_info=client_info, + ), + self.remove_instances: gapic_v1.method.wrap_method( + self.remove_instances, + default_timeout=None, + client_info=client_info, + ), + self.set_named_ports: gapic_v1.method.wrap_method( + self.set_named_ports, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def add_instances(self) -> Callable[ + [compute.AddInstancesInstanceGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInstanceGroupsRequest], + Union[ + compute.InstanceGroupAggregatedList, + Awaitable[compute.InstanceGroupAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteInstanceGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetInstanceGroupRequest], + Union[ + compute.InstanceGroup, + Awaitable[compute.InstanceGroup] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertInstanceGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListInstanceGroupsRequest], + Union[ + compute.InstanceGroupList, + Awaitable[compute.InstanceGroupList] + ]]: + raise NotImplementedError() + + @property + def list_instances(self) -> Callable[ + [compute.ListInstancesInstanceGroupsRequest], + Union[ + compute.InstanceGroupsListInstances, + Awaitable[compute.InstanceGroupsListInstances] + ]]: + raise NotImplementedError() + + @property + def remove_instances(self) -> Callable[ + [compute.RemoveInstancesInstanceGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_named_ports(self) -> Callable[ + [compute.SetNamedPortsInstanceGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _zone_operations_client(self) -> 
zone_operations.ZoneOperationsClient: + ex_op_service = self._extended_operations_services.get("zone_operations") + if not ex_op_service: + ex_op_service = zone_operations.ZoneOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["zone_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'InstanceGroupsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/rest.py new file mode 100644 index 000000000..5a9cf488a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_groups/transports/rest.py @@ -0,0 +1,1309 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import InstanceGroupsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class InstanceGroupsRestInterceptor: + """Interceptor for InstanceGroups. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InstanceGroupsRestTransport. + + .. 
code-block:: python + class MyCustomInstanceGroupsInterceptor(InstanceGroupsRestInterceptor): + def pre_add_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_named_ports(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_set_named_ports(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InstanceGroupsRestTransport(interceptor=MyCustomInstanceGroupsInterceptor()) + client = InstanceGroupsClient(transport=transport) + + + """ + def pre_add_instances(self, request: compute.AddInstancesInstanceGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddInstancesInstanceGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. + """ + return request, metadata + + def post_add_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. + """ + return response + def pre_aggregated_list(self, request: compute.AggregatedListInstanceGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListInstanceGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.InstanceGroupAggregatedList) -> compute.InstanceGroupAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. 
+ """ + return response + def pre_delete(self, request: compute.DeleteInstanceGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteInstanceGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetInstanceGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetInstanceGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. + """ + return request, metadata + + def post_get(self, response: compute.InstanceGroup) -> compute.InstanceGroup: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertInstanceGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertInstanceGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListInstanceGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInstanceGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. + """ + return request, metadata + + def post_list(self, response: compute.InstanceGroupList) -> compute.InstanceGroupList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. + """ + return response + def pre_list_instances(self, request: compute.ListInstancesInstanceGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInstancesInstanceGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. + """ + return request, metadata + + def post_list_instances(self, response: compute.InstanceGroupsListInstances) -> compute.InstanceGroupsListInstances: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. + """ + return response + def pre_remove_instances(self, request: compute.RemoveInstancesInstanceGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveInstancesInstanceGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. 
+ """ + return request, metadata + + def post_remove_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_instances + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. + """ + return response + def pre_set_named_ports(self, request: compute.SetNamedPortsInstanceGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetNamedPortsInstanceGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_named_ports + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroups server. + """ + return request, metadata + + def post_set_named_ports(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_named_ports + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroups server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InstanceGroupsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstanceGroupsRestInterceptor + + +class InstanceGroupsRestTransport(InstanceGroupsTransport): + """REST backend transport for InstanceGroups. + + The InstanceGroups API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InstanceGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InstanceGroupsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddInstances(InstanceGroupsRestStub): + def __hash__(self): + return hash("AddInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddInstancesInstanceGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add instances method over HTTP. + + Args: + request (~.compute.AddInstancesInstanceGroupRequest): + The request object. A request message for + InstanceGroups.AddInstances. 
See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/addInstances', + 'body': 'instance_groups_add_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_add_instances(request, metadata) + pb_request = compute.AddInstancesInstanceGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + 
response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_instances(resp) + return resp + + class _AggregatedList(InstanceGroupsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListInstanceGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListInstanceGroupsRequest): + The request object. A request message for + InstanceGroups.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstanceGroupAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/instanceGroups', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListInstanceGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupAggregatedList() + pb_resp = compute.InstanceGroupAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(InstanceGroupsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteInstanceGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteInstanceGroupRequest): + The request object. A request message for + InstanceGroups.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteInstanceGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(InstanceGroupsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetInstanceGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroup: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInstanceGroupRequest): + The request object. A request message for + InstanceGroups.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceGroup: + Represents an Instance Group + resource. Instance Groups can be used to + configure a target for load balancing. + Instance groups can either be managed or + unmanaged. To create managed instance + groups, use the instanceGroupManager or + regionInstanceGroupManager resource + instead. Use zonal unmanaged instance + groups if you need to apply load + balancing to groups of heterogeneous + instances or if you need to manage the + instances yourself. You cannot create + regional unmanaged instance groups. For + more information, read Instance groups. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInstanceGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroup() + pb_resp = compute.InstanceGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(InstanceGroupsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertInstanceGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertInstanceGroupRequest): + The request object. A request message for + InstanceGroups.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroups', + 'body': 'instance_group_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertInstanceGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(InstanceGroupsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInstanceGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInstanceGroupsRequest): + The request object. A request message for + InstanceGroups.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceGroupList: + A list of InstanceGroup resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroups', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInstanceGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupList() + pb_resp = compute.InstanceGroupList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListInstances(InstanceGroupsRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInstancesInstanceGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupsListInstances: + r"""Call the list instances method over HTTP. + + Args: + request (~.compute.ListInstancesInstanceGroupsRequest): + The request object. A request message for + InstanceGroups.ListInstances. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstanceGroupsListInstances: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/listInstances', + 'body': 'instance_groups_list_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = compute.ListInstancesInstanceGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupsListInstances() + pb_resp = compute.InstanceGroupsListInstances.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _RemoveInstances(InstanceGroupsRestStub): + def __hash__(self): + return hash("RemoveInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveInstancesInstanceGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove instances method over HTTP. + + Args: + request (~.compute.RemoveInstancesInstanceGroupRequest): + The request object. A request message for + InstanceGroups.RemoveInstances. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/removeInstances', + 'body': 'instance_groups_remove_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_remove_instances(request, metadata) + pb_request = compute.RemoveInstancesInstanceGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_instances(resp) + return resp + + class _SetNamedPorts(InstanceGroupsRestStub): + def __hash__(self): + return hash("SetNamedPorts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetNamedPortsInstanceGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set named ports method over HTTP. + + Args: + request (~.compute.SetNamedPortsInstanceGroupRequest): + The request object. A request message for + InstanceGroups.SetNamedPorts. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/setNamedPorts', + 'body': 'instance_groups_set_named_ports_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_named_ports(request, metadata) + pb_request = compute.SetNamedPortsInstanceGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_named_ports(resp) + return resp + + @property + def add_instances(self) -> Callable[ + [compute.AddInstancesInstanceGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AddInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInstanceGroupsRequest], + compute.InstanceGroupAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteInstanceGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetInstanceGroupRequest], + compute.InstanceGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertInstanceGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListInstanceGroupsRequest], + compute.InstanceGroupList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [compute.ListInstancesInstanceGroupsRequest], + compute.InstanceGroupsListInstances]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_instances(self) -> Callable[ + [compute.RemoveInstancesInstanceGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_named_ports(self) -> Callable[ + [compute.SetNamedPortsInstanceGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetNamedPorts(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InstanceGroupsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/__init__.py new file mode 100644 index 000000000..8f0b6344b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import InstanceTemplatesClient + +__all__ = ( + 'InstanceTemplatesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/client.py new file mode 100644 index 000000000..7d5d657e3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/client.py @@ -0,0 +1,1670 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.instance_templates import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import InstanceTemplatesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import InstanceTemplatesRestTransport + + +class InstanceTemplatesClientMeta(type): + """Metaclass for the InstanceTemplates client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceTemplatesTransport]] + _transport_registry["rest"] = InstanceTemplatesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[InstanceTemplatesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is
+            provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class InstanceTemplatesClient(metaclass=InstanceTemplatesClientMeta):
+    """The InstanceTemplates API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        # NOTE(review): this pattern arrived garbled as "(?P[^.]+)" etc.,
+        # which is not a valid regex (a named group needs "(?P<name>...)");
+        # the four group names are restored from the
+        # `name, mtls, sandbox, googledomain` unpacking of m.groups() below.
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            InstanceTemplatesClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceTemplatesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstanceTemplatesTransport: + """Returns the transport used by the client instance. + + Returns: + InstanceTemplatesTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        # NOTE(review): the parse_common_*_path patterns arrived garbled as
+        # "(?P.+?)" (invalid regex — a named group needs "(?P<name>...)");
+        # each group name is restored to match the placeholder used by the
+        # corresponding common_*_path format string above it.
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InstanceTemplatesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance templates client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, InstanceTemplatesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, InstanceTemplatesTransport): + # transport is a InstanceTemplatesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListInstanceTemplatesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all InstanceTemplates + resources, regional and global, available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInstanceTemplatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListInstanceTemplatesRequest, dict]): + The request object. A request message for + InstanceTemplates.AggregatedList. See + the method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instance_templates.pagers.AggregatedListPager: + Contains a list of + InstanceTemplatesScopedList. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListInstanceTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListInstanceTemplatesRequest): + request = compute.AggregatedListInstanceTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + instance_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified instance template. Deleting an + instance template is permanent and cannot be undone. 
It + is not possible to delete templates that are already in + use by a managed instance group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstanceTemplateRequest, dict]): + The request object. A request message for + InstanceTemplates.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template (str): + The name of the instance template to + delete. + + This corresponds to the ``instance_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, instance_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInstanceTemplateRequest): + request = compute.DeleteInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if instance_template is not None: + request.instance_template = instance_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("instance_template", request.instance_template), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + instance_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified instance template. 
Deleting an + instance template is permanent and cannot be undone. It + is not possible to delete templates that are already in + use by a managed instance group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstanceTemplateRequest, dict]): + The request object. A request message for + InstanceTemplates.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template (str): + The name of the instance template to + delete. + + This corresponds to the ``instance_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, instance_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInstanceTemplateRequest): + request = compute.DeleteInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if instance_template is not None: + request.instance_template = instance_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("instance_template", request.instance_template), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + instance_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstanceTemplate: + r"""Returns the specified instance template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetInstanceTemplateRequest, dict]): + The request object. A request message for + InstanceTemplates.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template (str): + The name of the instance template. + This corresponds to the ``instance_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InstanceTemplate: + Represents an Instance Template + resource. You can use instance templates + to create VM instances and managed + instance groups. For more information, + read Instance Templates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, instance_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetInstanceTemplateRequest): + request = compute.GetInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if instance_template is not None: + request.instance_template = instance_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("instance_template", request.instance_template), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyInstanceTemplateRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyInstanceTemplateRequest, dict]): + The request object. A request message for + InstanceTemplates.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). 
A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyInstanceTemplateRequest): + request = compute.GetIamPolicyInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + instance_template_resource: Optional[compute.InstanceTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an instance template in the specified project + using the data that is included in the request. 
If you + are creating a new template to update an existing + instance group, your new instance template must use the + same network or, if applicable, the same subnetwork as + the original template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceTemplateRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstanceTemplateRequest, dict]): + The request object. A request message for + InstanceTemplates.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template_resource (google.cloud.compute_v1.types.InstanceTemplate): + The body resource for this request + This corresponds to the ``instance_template_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, instance_template_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInstanceTemplateRequest): + request = compute.InsertInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if instance_template_resource is not None: + request.instance_template_resource = instance_template_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + instance_template_resource: Optional[compute.InstanceTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an instance template in the specified project + using the data that is included in the request. If you + are creating a new template to update an existing + instance group, your new instance template must use the + same network or, if applicable, the same subnetwork as + the original template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceTemplateRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstanceTemplateRequest, dict]): + The request object. A request message for + InstanceTemplates.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance_template_resource (google.cloud.compute_v1.types.InstanceTemplate): + The body resource for this request + This corresponds to the ``instance_template_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, instance_template_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInstanceTemplateRequest): + request = compute.InsertInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if instance_template_resource is not None: + request.instance_template_resource = instance_template_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListInstanceTemplatesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of instance templates that are + contained within the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.ListInstanceTemplatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInstanceTemplatesRequest, dict]): + The request object. A request message for + InstanceTemplates.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instance_templates.pagers.ListPager: + A list of instance templates. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInstanceTemplatesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInstanceTemplatesRequest): + request = compute.ListInstanceTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_policy_request_resource: Optional[compute.GlobalSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyInstanceTemplateRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyInstanceTemplateRequest, dict]): + The request object. A request message for + InstanceTemplates.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. 
A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicyInstanceTemplateRequest): + request = compute.SetIamPolicyInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_policy_request_resource is not None: + request.global_set_policy_request_resource = global_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsInstanceTemplateRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsInstanceTemplateRequest, dict]): + The request object. A request message for + InstanceTemplates.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. 
+ + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsInstanceTemplateRequest): + request = compute.TestIamPermissionsInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "InstanceTemplatesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "InstanceTemplatesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/pagers.py new file mode 100644 index 000000000..d9e4ed6af --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceTemplateAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceTemplateAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceTemplateAggregatedList], + request: compute.AggregatedListInstanceTemplatesRequest, + response: compute.InstanceTemplateAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListInstanceTemplatesRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceTemplateAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListInstanceTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceTemplateAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.InstanceTemplatesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.InstanceTemplatesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceTemplateList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceTemplateList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceTemplateList], + request: compute.ListInstanceTemplatesRequest, + response: compute.InstanceTemplateList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListInstanceTemplatesRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceTemplateList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListInstanceTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceTemplateList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceTemplate]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/__init__.py new file mode 100644 index 000000000..5cf4c32d4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstanceTemplatesTransport +from .rest import InstanceTemplatesRestTransport +from .rest import InstanceTemplatesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceTemplatesTransport]] +_transport_registry['rest'] = InstanceTemplatesRestTransport + +__all__ = ( + 'InstanceTemplatesTransport', + 'InstanceTemplatesRestTransport', + 'InstanceTemplatesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/base.py new file mode 100644 index 000000000..bebac1857 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/base.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class InstanceTemplatesTransport(abc.ABC): + """Abstract transport class for InstanceTemplates.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInstanceTemplatesRequest], + Union[ + compute.InstanceTemplateAggregatedList, + Awaitable[compute.InstanceTemplateAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteInstanceTemplateRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetInstanceTemplateRequest], + Union[ + compute.InstanceTemplate, + Awaitable[compute.InstanceTemplate] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyInstanceTemplateRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertInstanceTemplateRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListInstanceTemplatesRequest], + Union[ + compute.InstanceTemplateList, + Awaitable[compute.InstanceTemplateList] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyInstanceTemplateRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsInstanceTemplateRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = 
global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'InstanceTemplatesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/rest.py new file mode 100644 index 000000000..a4c5bfc14 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instance_templates/transports/rest.py @@ -0,0 +1,1211 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import InstanceTemplatesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class InstanceTemplatesRestInterceptor: + """Interceptor for InstanceTemplates. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InstanceTemplatesRestTransport. + + .. 
code-block:: python + class MyCustomInstanceTemplatesInterceptor(InstanceTemplatesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
InstanceTemplatesRestTransport(interceptor=MyCustomInstanceTemplatesInterceptor()) + client = InstanceTemplatesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListInstanceTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListInstanceTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceTemplates server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.InstanceTemplateAggregatedList) -> compute.InstanceTemplateAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the InstanceTemplates server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteInstanceTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceTemplates server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the InstanceTemplates server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetInstanceTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceTemplates server. 
+ """ + return request, metadata + + def post_get(self, response: compute.InstanceTemplate) -> compute.InstanceTemplate: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the InstanceTemplates server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyInstanceTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceTemplates server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the InstanceTemplates server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertInstanceTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceTemplates server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the InstanceTemplates server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListInstanceTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInstanceTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceTemplates server. + """ + return request, metadata + + def post_list(self, response: compute.InstanceTemplateList) -> compute.InstanceTemplateList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the InstanceTemplates server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyInstanceTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceTemplates server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the InstanceTemplates server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsInstanceTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceTemplates server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the InstanceTemplates server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InstanceTemplatesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstanceTemplatesRestInterceptor + + +class InstanceTemplatesRestTransport(InstanceTemplatesTransport): + """REST backend transport for InstanceTemplates. + + The InstanceTemplates API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InstanceTemplatesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InstanceTemplatesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(InstanceTemplatesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListInstanceTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceTemplateAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListInstanceTemplatesRequest): + The request object. A request message for + InstanceTemplates.AggregatedList. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstanceTemplateAggregatedList: + Contains a list of + InstanceTemplatesScopedList. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/instanceTemplates', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListInstanceTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceTemplateAggregatedList() + pb_resp = compute.InstanceTemplateAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(InstanceTemplatesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteInstanceTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteInstanceTemplateRequest): + The request object. A request message for + InstanceTemplates.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteInstanceTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(InstanceTemplatesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetInstanceTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceTemplate: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInstanceTemplateRequest): + The request object. A request message for + InstanceTemplates.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceTemplate: + Represents an Instance Template + resource. You can use instance templates + to create VM instances and managed + instance groups. For more information, + read Instance Templates. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInstanceTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceTemplate() + pb_resp = compute.InstanceTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(InstanceTemplatesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyInstanceTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyInstanceTemplateRequest): + The request object. A request message for + InstanceTemplates.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/instanceTemplates/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyInstanceTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(InstanceTemplatesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertInstanceTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertInstanceTemplateRequest): + The request object. A request message for + InstanceTemplates.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/instanceTemplates', + 'body': 'instance_template_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertInstanceTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(InstanceTemplatesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInstanceTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceTemplateList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInstanceTemplatesRequest): + The request object. A request message for + InstanceTemplates.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceTemplateList: + A list of instance templates. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/instanceTemplates', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInstanceTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceTemplateList() + pb_resp = compute.InstanceTemplateList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetIamPolicy(InstanceTemplatesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyInstanceTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyInstanceTemplateRequest): + The request object. A request message for + InstanceTemplates.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/instanceTemplates/{resource}/setIamPolicy', + 'body': 'global_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyInstanceTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(InstanceTemplatesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsInstanceTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsInstanceTemplateRequest): + The request object. A request message for + InstanceTemplates.TestIamPermissions. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/instanceTemplates/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsInstanceTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInstanceTemplatesRequest], + compute.InstanceTemplateAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteInstanceTemplateRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetInstanceTemplateRequest], + compute.InstanceTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyInstanceTemplateRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertInstanceTemplateRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListInstanceTemplatesRequest], + compute.InstanceTemplateList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyInstanceTemplateRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsInstanceTemplateRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InstanceTemplatesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/__init__.py new file mode 100644 index 000000000..78e18c489 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import InstancesClient + +__all__ = ( + 'InstancesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/client.py new file mode 100644 index 000000000..31804addf --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/client.py @@ -0,0 +1,11705 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.instances import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import InstancesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import InstancesRestTransport + + +class InstancesClientMeta(type): + """Metaclass for the Instances client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InstancesTransport]] + _transport_registry["rest"] = InstancesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[InstancesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+ + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InstancesClient(metaclass=InstancesClientMeta): + """The Instances API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstancesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstancesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstancesTransport: + """Returns the transport used by the client instance. + + Returns: + InstancesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + 
return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InstancesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instances client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, InstancesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, InstancesTransport): + # transport is a InstancesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def add_access_config_unary(self, + request: Optional[Union[compute.AddAccessConfigInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + network_interface: Optional[str] = None, + access_config_resource: Optional[compute.AccessConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Adds an access config to an instance's network + interface. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AddAccessConfigInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_access_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddAccessConfigInstanceRequest, dict]): + The request object. A request message for + Instances.AddAccessConfig. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface to + add to this instance. + + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_config_resource (google.cloud.compute_v1.types.AccessConfig): + The body resource for this request + This corresponds to the ``access_config_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, network_interface, access_config_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddAccessConfigInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddAccessConfigInstanceRequest): + request = compute.AddAccessConfigInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if network_interface is not None: + request.network_interface = network_interface + if access_config_resource is not None: + request.access_config_resource = access_config_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_access_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def add_access_config(self, + request: Optional[Union[compute.AddAccessConfigInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + network_interface: Optional[str] = None, + access_config_resource: Optional[compute.AccessConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Adds an access config to an instance's network + interface. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AddAccessConfigInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_access_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddAccessConfigInstanceRequest, dict]): + The request object. A request message for + Instances.AddAccessConfig. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface to + add to this instance. + + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_config_resource (google.cloud.compute_v1.types.AccessConfig): + The body resource for this request + This corresponds to the ``access_config_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, network_interface, access_config_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddAccessConfigInstanceRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddAccessConfigInstanceRequest): + request = compute.AddAccessConfigInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if network_interface is not None: + request.network_interface = network_interface + if access_config_resource is not None: + request.access_config_resource = access_config_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_access_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def add_resource_policies_unary(self, + request: Optional[Union[compute.AddResourcePoliciesInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_add_resource_policies_request_resource: Optional[compute.InstancesAddResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Adds existing resource policies to an instance. You + can only add one policy right now which will be applied + to this instance for scheduling live migrations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_resource_policies(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AddResourcePoliciesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddResourcePoliciesInstanceRequest, dict]): + The request object. A request message for + Instances.AddResourcePolicies. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_add_resource_policies_request_resource (google.cloud.compute_v1.types.InstancesAddResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``instances_add_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_add_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddResourcePoliciesInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddResourcePoliciesInstanceRequest): + request = compute.AddResourcePoliciesInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_add_resource_policies_request_resource is not None: + request.instances_add_resource_policies_request_resource = instances_add_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_resource_policies(self, + request: Optional[Union[compute.AddResourcePoliciesInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_add_resource_policies_request_resource: Optional[compute.InstancesAddResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Adds existing resource policies to an instance. You + can only add one policy right now which will be applied + to this instance for scheduling live migrations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_resource_policies(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AddResourcePoliciesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddResourcePoliciesInstanceRequest, dict]): + The request object. A request message for + Instances.AddResourcePolicies. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_add_resource_policies_request_resource (google.cloud.compute_v1.types.InstancesAddResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``instances_add_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_add_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddResourcePoliciesInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddResourcePoliciesInstanceRequest): + request = compute.AddResourcePoliciesInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_add_resource_policies_request_resource is not None: + request.instances_add_resource_policies_request_resource = instances_add_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListInstancesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of all of the instances + in your project across all regions and zones. The + performance of this method degrades when a filter is + specified on a project that has a very large number of + instances. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInstancesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListInstancesRequest, dict]): + The request object. A request message for + Instances.AggregatedList. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instances.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListInstancesRequest): + request = compute.AggregatedListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def attach_disk_unary(self, + request: Optional[Union[compute.AttachDiskInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + attached_disk_resource: Optional[compute.AttachedDisk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Attaches an existing Disk resource to an instance. + You must first create the disk before you can attach it. + It is not possible to create and attach a disk at the + same time. For more information, read Adding a + persistent disk to your instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_attach_disk(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AttachDiskInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.attach_disk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AttachDiskInstanceRequest, dict]): + The request object. A request message for + Instances.AttachDisk. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            zone (str):
+                The name of the zone for this
+                request.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance (str):
+                The instance name for this request.
+                This corresponds to the ``instance`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            attached_disk_resource (google.cloud.compute_v1.types.AttachedDisk):
+                The body resource for this request
+                This corresponds to the ``attached_disk_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, instance, attached_disk_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.AttachDiskInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.AttachDiskInstanceRequest):
+            request = compute.AttachDiskInstanceRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if attached_disk_resource is not None: + request.attached_disk_resource = attached_disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.attach_disk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def attach_disk(self, + request: Optional[Union[compute.AttachDiskInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + attached_disk_resource: Optional[compute.AttachedDisk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Attaches an existing Disk resource to an instance. + You must first create the disk before you can attach it. + It is not possible to create and attach a disk at the + same time. For more information, read Adding a + persistent disk to your instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_attach_disk(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AttachDiskInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.attach_disk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AttachDiskInstanceRequest, dict]): + The request object. A request message for + Instances.AttachDisk. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + attached_disk_resource (google.cloud.compute_v1.types.AttachedDisk): + The body resource for this request + This corresponds to the ``attached_disk_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, attached_disk_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AttachDiskInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AttachDiskInstanceRequest): + request = compute.AttachDiskInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if attached_disk_resource is not None: + request.attached_disk_resource = attached_disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.attach_disk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def bulk_insert_unary(self, + request: Optional[Union[compute.BulkInsertInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + bulk_insert_instance_resource_resource: Optional[compute.BulkInsertInstanceResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates multiple instances. Count specifies the + number of instances to create. For more information, see + About bulk creation of VMs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_bulk_insert(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertInstanceRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.BulkInsertInstanceRequest, dict]): + The request object. A request message for + Instances.BulkInsert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + bulk_insert_instance_resource_resource (google.cloud.compute_v1.types.BulkInsertInstanceResource): + The body resource for this request + This corresponds to the ``bulk_insert_instance_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, bulk_insert_instance_resource_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.BulkInsertInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.BulkInsertInstanceRequest): + request = compute.BulkInsertInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if bulk_insert_instance_resource_resource is not None: + request.bulk_insert_instance_resource_resource = bulk_insert_instance_resource_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.bulk_insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def bulk_insert(self, + request: Optional[Union[compute.BulkInsertInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + bulk_insert_instance_resource_resource: Optional[compute.BulkInsertInstanceResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates multiple instances. Count specifies the + number of instances to create. For more information, see + About bulk creation of VMs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_bulk_insert(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertInstanceRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.BulkInsertInstanceRequest, dict]): + The request object. A request message for + Instances.BulkInsert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            bulk_insert_instance_resource_resource (google.cloud.compute_v1.types.BulkInsertInstanceResource):
+                The body resource for this request
+                This corresponds to the ``bulk_insert_instance_resource_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, bulk_insert_instance_resource_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.BulkInsertInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.BulkInsertInstanceRequest):
+            request = compute.BulkInsertInstanceRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if zone is not None:
+            request.zone = zone
+        if bulk_insert_instance_resource_resource is not None:
+            request.bulk_insert_instance_resource_resource = bulk_insert_instance_resource_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.bulk_insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified Instance resource. For more + information, see Deleting an instance. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstanceRequest, dict]): + The request object. A request message for + Instances.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + delete. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInstanceRequest): + request = compute.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete(self, + request: Optional[Union[compute.DeleteInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified Instance resource. For more + information, see Deleting an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstanceRequest, dict]): + The request object. A request message for + Instances.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + delete. 
+
+                This corresponds to the ``instance`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, instance])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.DeleteInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.DeleteInstanceRequest):
+            request = compute.DeleteInstanceRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if zone is not None:
+            request.zone = zone
+        if instance is not None:
+            request.instance = instance
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+                ("instance", request.instance),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_access_config_unary(self, + request: Optional[Union[compute.DeleteAccessConfigInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + access_config: Optional[str] = None, + network_interface: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes an access config from an instance's network + interface. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAccessConfigInstanceRequest( + access_config="access_config_value", + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete_access_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteAccessConfigInstanceRequest, dict]): + The request object. A request message for + Instances.DeleteAccessConfig. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_config (str): + The name of the access config to + delete. + + This corresponds to the ``access_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface. + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, instance, access_config, network_interface])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.DeleteAccessConfigInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.DeleteAccessConfigInstanceRequest):
+            request = compute.DeleteAccessConfigInstanceRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if zone is not None:
+            request.zone = zone
+        if instance is not None:
+            request.instance = instance
+        if access_config is not None:
+            request.access_config = access_config
+        if network_interface is not None:
+            request.network_interface = network_interface
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_access_config]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+                ("instance", request.instance),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_access_config(self, + request: Optional[Union[compute.DeleteAccessConfigInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + access_config: Optional[str] = None, + network_interface: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes an access config from an instance's network + interface. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAccessConfigInstanceRequest( + access_config="access_config_value", + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete_access_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteAccessConfigInstanceRequest, dict]): + The request object. A request message for + Instances.DeleteAccessConfig. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_config (str): + The name of the access config to + delete. + + This corresponds to the ``access_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface. + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, access_config, network_interface]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteAccessConfigInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.DeleteAccessConfigInstanceRequest): + request = compute.DeleteAccessConfigInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if access_config is not None: + request.access_config = access_config + if network_interface is not None: + request.network_interface = network_interface + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_access_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def detach_disk_unary(self, + request: Optional[Union[compute.DetachDiskInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + device_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Detaches a disk from an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_detach_disk(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DetachDiskInstanceRequest( + device_name="device_name_value", + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.detach_disk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DetachDiskInstanceRequest, dict]): + The request object. A request message for + Instances.DetachDisk. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + device_name (str): + The device name of the disk to + detach. Make a get() request on the + instance to view currently attached + disks and device names. + + This corresponds to the ``device_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, device_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DetachDiskInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.DetachDiskInstanceRequest): + request = compute.DetachDiskInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if device_name is not None: + request.device_name = device_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_disk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def detach_disk(self, + request: Optional[Union[compute.DetachDiskInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + device_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Detaches a disk from an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_detach_disk(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DetachDiskInstanceRequest( + device_name="device_name_value", + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.detach_disk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DetachDiskInstanceRequest, dict]): + The request object. A request message for + Instances.DetachDisk. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + device_name (str): + The device name of the disk to + detach. Make a get() request on the + instance to view currently attached + disks and device names. + + This corresponds to the ``device_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, device_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DetachDiskInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DetachDiskInstanceRequest): + request = compute.DetachDiskInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if device_name is not None: + request.device_name = device_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_disk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Instance: + r"""Returns the specified Instance resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetInstanceRequest, dict]): + The request object. A request message for Instances.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + return. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Instance: + Represents an Instance resource. An + instance is a virtual machine that is + hosted on Google Cloud Platform. For + more information, read Virtual Machine + Instances. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetInstanceRequest): + request = compute.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_effective_firewalls(self, + request: Optional[Union[compute.GetEffectiveFirewallsInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + network_interface: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstancesGetEffectiveFirewallsResponse: + r"""Returns effective firewalls applied to an interface + of the instance. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_effective_firewalls(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetEffectiveFirewallsInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_effective_firewalls(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetEffectiveFirewallsInstanceRequest, dict]): + The request object. A request message for + Instances.GetEffectiveFirewalls. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface to + get the effective firewalls. + + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InstancesGetEffectiveFirewallsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, network_interface]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetEffectiveFirewallsInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetEffectiveFirewallsInstanceRequest): + request = compute.GetEffectiveFirewallsInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if network_interface is not None: + request.network_interface = network_interface + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_effective_firewalls] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_guest_attributes(self, + request: Optional[Union[compute.GetGuestAttributesInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.GuestAttributes: + r"""Returns the specified guest attributes entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_guest_attributes(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetGuestAttributesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_guest_attributes(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetGuestAttributesInstanceRequest, dict]): + The request object. A request message for + Instances.GetGuestAttributes. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.GuestAttributes: + A guest attributes entry. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetGuestAttributesInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetGuestAttributesInstanceRequest): + request = compute.GetGuestAttributesInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_guest_attributes] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyInstanceRequest, dict]): + The request object. A request message for + Instances.GetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicyInstanceRequest): + request = compute.GetIamPolicyInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_screenshot(self, + request: Optional[Union[compute.GetScreenshotInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Screenshot: + r"""Returns the screenshot from the specified instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_screenshot(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetScreenshotInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_screenshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetScreenshotInstanceRequest, dict]): + The request object. A request message for + Instances.GetScreenshot. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Screenshot: + An instance's screenshot. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetScreenshotInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetScreenshotInstanceRequest): + request = compute.GetScreenshotInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_screenshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_serial_port_output(self, + request: Optional[Union[compute.GetSerialPortOutputInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SerialPortOutput: + r"""Returns the last 1 MB of serial port output from the + specified instance. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_serial_port_output(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetSerialPortOutputInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_serial_port_output(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetSerialPortOutputInstanceRequest, dict]): + The request object. A request message for + Instances.GetSerialPortOutput. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance for this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SerialPortOutput: + An instance serial console output. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetSerialPortOutputInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetSerialPortOutputInstanceRequest): + request = compute.GetSerialPortOutputInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_serial_port_output] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_shielded_instance_identity(self, + request: Optional[Union[compute.GetShieldedInstanceIdentityInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.ShieldedInstanceIdentity: + r"""Returns the Shielded Instance Identity of an instance + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_shielded_instance_identity(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetShieldedInstanceIdentityInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_shielded_instance_identity(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetShieldedInstanceIdentityInstanceRequest, dict]): + The request object. A request message for + Instances.GetShieldedInstanceIdentity. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name or id of the instance scoping + this request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.ShieldedInstanceIdentity: + A Shielded Instance Identity. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetShieldedInstanceIdentityInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetShieldedInstanceIdentityInstanceRequest): + request = compute.GetShieldedInstanceIdentityInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_shielded_instance_identity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_resource: Optional[compute.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an instance resource in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstanceRequest, dict]): + The request object. A request message for + Instances.Insert. 
See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_resource (google.cloud.compute_v1.types.Instance): + The body resource for this request + This corresponds to the ``instance_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInstanceRequest): + request = compute.InsertInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_resource is not None: + request.instance_resource = instance_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_resource: Optional[compute.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an instance resource in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstanceRequest, dict]): + The request object. A request message for + Instances.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_resource (google.cloud.compute_v1.types.Instance): + The body resource for this request + This corresponds to the ``instance_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, instance_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInstanceRequest): + request = compute.InsertInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_resource is not None: + request.instance_resource = instance_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListInstancesRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of instances contained within the + specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.ListInstancesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInstancesRequest, dict]): + The request object. A request message for Instances.List. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instances.pagers.ListPager: + Contains a list of instances. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInstancesRequest): + request = compute.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_referrers(self, + request: Optional[Union[compute.ListReferrersInstancesRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReferrersPager: + r"""Retrieves a list of resources that refer to the VM + instance specified in the request. For example, if the + VM instance is part of a managed or unmanaged instance + group, the referrers list includes the instance group. + For more information, read Viewing referrers to VM + instances. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_referrers(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.ListReferrersInstancesRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_referrers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListReferrersInstancesRequest, dict]): + The request object. A request message for + Instances.ListReferrers. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the target instance scoping + this request, or '-' if the request + should span over all instances in the + container. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instances.pagers.ListReferrersPager: + Contains a list of instance + referrers. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListReferrersInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListReferrersInstancesRequest): + request = compute.ListReferrersInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_referrers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListReferrersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def remove_resource_policies_unary(self, + request: Optional[Union[compute.RemoveResourcePoliciesInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_remove_resource_policies_request_resource: Optional[compute.InstancesRemoveResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes resource policies from an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_resource_policies(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesInstanceRequest, dict]): + The request object. A request message for + Instances.RemoveResourcePolicies. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_remove_resource_policies_request_resource (google.cloud.compute_v1.types.InstancesRemoveResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``instances_remove_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_remove_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveResourcePoliciesInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveResourcePoliciesInstanceRequest): + request = compute.RemoveResourcePoliciesInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_remove_resource_policies_request_resource is not None: + request.instances_remove_resource_policies_request_resource = instances_remove_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_resource_policies(self, + request: Optional[Union[compute.RemoveResourcePoliciesInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_remove_resource_policies_request_resource: Optional[compute.InstancesRemoveResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes resource policies from an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_resource_policies(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesInstanceRequest, dict]): + The request object. A request message for + Instances.RemoveResourcePolicies. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_remove_resource_policies_request_resource (google.cloud.compute_v1.types.InstancesRemoveResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``instances_remove_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance, instances_remove_resource_policies_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.RemoveResourcePoliciesInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.RemoveResourcePoliciesInstanceRequest):
+ request = compute.RemoveResourcePoliciesInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if zone is not None:
+ request.zone = zone
+ if instance is not None:
+ request.instance = instance
+ if instances_remove_resource_policies_request_resource is not None:
+ request.instances_remove_resource_policies_request_resource = instances_remove_resource_policies_request_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.remove_resource_policies]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("zone", request.zone),
+ ("instance", request.instance),
+ )),
+ )
+
+ # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def reset_unary(self, + request: Optional[Union[compute.ResetInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Performs a reset on the instance. This is a hard + reset. The VM does not do a graceful shutdown. For more + information, see Resetting an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_reset():
+ # Create a client
+ client = compute_v1.InstancesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.ResetInstanceRequest(
+ instance="instance_value",
+ project="project_value",
+ zone="zone_value",
+ )
+
+ # Make the request
+ response = client.reset(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.ResetInstanceRequest, dict]):
+ The request object. A request message for
+ Instances.Reset. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ The name of the zone for this
+ request.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (str):
+ Name of the instance scoping this
+ request.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An Operation resource, describing the
+ status of the asynchronous request.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResetInstanceRequest): + request = compute.ResetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def reset(self, + request: Optional[Union[compute.ResetInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Performs a reset on the instance. This is a hard + reset. The VM does not do a graceful shutdown. For more + information, see Resetting an instance. + + .. 
code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_reset():
+ # Create a client
+ client = compute_v1.InstancesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.ResetInstanceRequest(
+ instance="instance_value",
+ project="project_value",
+ zone="zone_value",
+ )
+
+ # Make the request
+ response = client.reset(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.ResetInstanceRequest, dict]):
+ The request object. A request message for
+ Instances.Reset. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ The name of the zone for this
+ request.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (str):
+ Name of the instance scoping this
+ request.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResetInstanceRequest): + request = compute.ResetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def resume_unary(self, + request: Optional[Union[compute.ResumeInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Resumes an instance that was suspended using the + instances().suspend method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_resume():
+ # Create a client
+ client = compute_v1.InstancesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.ResumeInstanceRequest(
+ instance="instance_value",
+ project="project_value",
+ zone="zone_value",
+ )
+
+ # Make the request
+ response = client.resume(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.ResumeInstanceRequest, dict]):
+ The request object. A request message for
+ Instances.Resume. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ The name of the zone for this
+ request.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (str):
+ Name of the instance resource to
+ resume.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An Operation resource, describing the
+ status of the asynchronous request.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResumeInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResumeInstanceRequest): + request = compute.ResumeInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resume(self, + request: Optional[Union[compute.ResumeInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Resumes an instance that was suspended using the + instances().suspend method. + + .. 
code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_resume():
+ # Create a client
+ client = compute_v1.InstancesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.ResumeInstanceRequest(
+ instance="instance_value",
+ project="project_value",
+ zone="zone_value",
+ )
+
+ # Make the request
+ response = client.resume(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.ResumeInstanceRequest, dict]):
+ The request object. A request message for
+ Instances.Resume. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ The name of the zone for this
+ request.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (str):
+ Name of the instance resource to
+ resume.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResumeInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResumeInstanceRequest): + request = compute.ResumeInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def send_diagnostic_interrupt(self, + request: Optional[Union[compute.SendDiagnosticInterruptInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SendDiagnosticInterruptInstanceResponse: + r"""Sends diagnostic interrupt to the instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_send_diagnostic_interrupt(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SendDiagnosticInterruptInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.send_diagnostic_interrupt(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SendDiagnosticInterruptInstanceRequest, dict]): + The request object. A request message for + Instances.SendDiagnosticInterrupt. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SendDiagnosticInterruptInstanceResponse: + A response message for + Instances.SendDiagnosticInterrupt. See + the method description for details. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SendDiagnosticInterruptInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SendDiagnosticInterruptInstanceRequest): + request = compute.SendDiagnosticInterruptInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.send_diagnostic_interrupt] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_deletion_protection_unary(self, + request: Optional[Union[compute.SetDeletionProtectionInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets deletion protection on the instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_deletion_protection(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetDeletionProtectionInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_deletion_protection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetDeletionProtectionInstanceRequest, dict]): + The request object. A request message for + Instances.SetDeletionProtection. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str):
+ Name or id of the resource for this
+ request.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An Operation resource, describing the
+ status of the asynchronous request.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetDeletionProtectionInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetDeletionProtectionInstanceRequest):
+ request = compute.SetDeletionProtectionInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if zone is not None:
+ request.zone = zone
+ if resource is not None:
+ request.resource = resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_deletion_protection]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_deletion_protection(self, + request: Optional[Union[compute.SetDeletionProtectionInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets deletion protection on the instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_deletion_protection(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetDeletionProtectionInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_deletion_protection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetDeletionProtectionInstanceRequest, dict]): + The request object. A request message for + Instances.SetDeletionProtection. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetDeletionProtectionInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetDeletionProtectionInstanceRequest): + request = compute.SetDeletionProtectionInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_deletion_protection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_disk_auto_delete_unary(self, + request: Optional[Union[compute.SetDiskAutoDeleteInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + auto_delete: Optional[bool] = None, + device_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the auto-delete flag for a disk attached to an + instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_disk_auto_delete(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetDiskAutoDeleteInstanceRequest( + auto_delete=True, + device_name="device_name_value", + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_disk_auto_delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetDiskAutoDeleteInstanceRequest, dict]): + The request object. A request message for + Instances.SetDiskAutoDelete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + auto_delete (bool): + Whether to auto-delete the disk when + the instance is deleted. + + This corresponds to the ``auto_delete`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + device_name (str): + The device name of the disk to + modify. Make a get() request on the + instance to view currently attached + disks and device names. + + This corresponds to the ``device_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, auto_delete, device_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetDiskAutoDeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetDiskAutoDeleteInstanceRequest): + request = compute.SetDiskAutoDeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if auto_delete is not None: + request.auto_delete = auto_delete + if device_name is not None: + request.device_name = device_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_disk_auto_delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_disk_auto_delete(self, + request: Optional[Union[compute.SetDiskAutoDeleteInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + auto_delete: Optional[bool] = None, + device_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the auto-delete flag for a disk attached to an + instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_disk_auto_delete(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetDiskAutoDeleteInstanceRequest( + auto_delete=True, + device_name="device_name_value", + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_disk_auto_delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetDiskAutoDeleteInstanceRequest, dict]): + The request object. A request message for + Instances.SetDiskAutoDelete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + auto_delete (bool): + Whether to auto-delete the disk when + the instance is deleted. + + This corresponds to the ``auto_delete`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + device_name (str): + The device name of the disk to + modify. Make a get() request on the + instance to view currently attached + disks and device names. + + This corresponds to the ``device_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance, auto_delete, device_name])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetDiskAutoDeleteInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetDiskAutoDeleteInstanceRequest):
+ request = compute.SetDiskAutoDeleteInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if zone is not None:
+ request.zone = zone
+ if instance is not None:
+ request.instance = instance
+ if auto_delete is not None:
+ request.auto_delete = auto_delete
+ if device_name is not None:
+ request.device_name = device_name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_disk_auto_delete]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + zone_set_policy_request_resource: Optional[compute.ZoneSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyInstanceRequest, dict]): + The request object. A request message for + Instances.SetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + This corresponds to the ``zone_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, zone_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyInstanceRequest): + request = compute.SetIamPolicyInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if zone_set_policy_request_resource is not None: + request.zone_set_policy_request_resource = zone_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_labels_request_resource: Optional[compute.InstancesSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets labels on an instance. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsInstanceRequest, dict]): + The request object. A request message for + Instances.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_labels_request_resource (google.cloud.compute_v1.types.InstancesSetLabelsRequest): + The body resource for this request + This corresponds to the ``instances_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsInstanceRequest): + request = compute.SetLabelsInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_labels_request_resource is not None: + request.instances_set_labels_request_resource = instances_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_labels_request_resource: Optional[compute.InstancesSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets labels on an instance. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsInstanceRequest, dict]): + The request object. A request message for + Instances.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_labels_request_resource (google.cloud.compute_v1.types.InstancesSetLabelsRequest): + The body resource for this request + This corresponds to the ``instances_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsInstanceRequest): + request = compute.SetLabelsInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_labels_request_resource is not None: + request.instances_set_labels_request_resource = instances_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_machine_resources_unary(self, + request: Optional[Union[compute.SetMachineResourcesInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_machine_resources_request_resource: Optional[compute.InstancesSetMachineResourcesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the number and/or type of accelerator for a + stopped instance to the values specified in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_machine_resources(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMachineResourcesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_machine_resources(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetMachineResourcesInstanceRequest, dict]): + The request object. A request message for + Instances.SetMachineResources. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (str):
+ Name of the instance scoping this
+ request.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instances_set_machine_resources_request_resource (google.cloud.compute_v1.types.InstancesSetMachineResourcesRequest):
+ The body resource for this request
+ This corresponds to the ``instances_set_machine_resources_request_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ The raw long-running operation
+ response, returned without polling.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance, instances_set_machine_resources_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetMachineResourcesInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetMachineResourcesInstanceRequest):
+ request = compute.SetMachineResourcesInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_machine_resources_request_resource is not None: + request.instances_set_machine_resources_request_resource = instances_set_machine_resources_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_machine_resources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_machine_resources(self, + request: Optional[Union[compute.SetMachineResourcesInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_machine_resources_request_resource: Optional[compute.InstancesSetMachineResourcesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the number and/or type of accelerator for a + stopped instance to the values specified in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_machine_resources(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMachineResourcesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_machine_resources(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetMachineResourcesInstanceRequest, dict]): + The request object. A request message for + Instances.SetMachineResources. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_machine_resources_request_resource (google.cloud.compute_v1.types.InstancesSetMachineResourcesRequest): + The body resource for this request + This corresponds to the ``instances_set_machine_resources_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_machine_resources_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetMachineResourcesInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetMachineResourcesInstanceRequest): + request = compute.SetMachineResourcesInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_machine_resources_request_resource is not None: + request.instances_set_machine_resources_request_resource = instances_set_machine_resources_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_machine_resources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_machine_type_unary(self, + request: Optional[Union[compute.SetMachineTypeInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_machine_type_request_resource: Optional[compute.InstancesSetMachineTypeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the machine type for a stopped instance to + the machine type specified in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_machine_type(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMachineTypeInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_machine_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetMachineTypeInstanceRequest, dict]): + The request object. A request message for + Instances.SetMachineType. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_machine_type_request_resource (google.cloud.compute_v1.types.InstancesSetMachineTypeRequest): + The body resource for this request + This corresponds to the ``instances_set_machine_type_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ The raw long-running operation
+ response, returned without polling.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance, instances_set_machine_type_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetMachineTypeInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetMachineTypeInstanceRequest):
+ request = compute.SetMachineTypeInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if zone is not None:
+ request.zone = zone
+ if instance is not None:
+ request.instance = instance
+ if instances_set_machine_type_request_resource is not None:
+ request.instances_set_machine_type_request_resource = instances_set_machine_type_request_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_machine_type]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("zone", request.zone),
+ ("instance", request.instance),
+ )),
+ )
+
+ # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_machine_type(self, + request: Optional[Union[compute.SetMachineTypeInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_machine_type_request_resource: Optional[compute.InstancesSetMachineTypeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the machine type for a stopped instance to + the machine type specified in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_machine_type(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMachineTypeInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_machine_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetMachineTypeInstanceRequest, dict]): + The request object. A request message for + Instances.SetMachineType. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_machine_type_request_resource (google.cloud.compute_v1.types.InstancesSetMachineTypeRequest): + The body resource for this request + This corresponds to the ``instances_set_machine_type_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_machine_type_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetMachineTypeInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetMachineTypeInstanceRequest): + request = compute.SetMachineTypeInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_machine_type_request_resource is not None: + request.instances_set_machine_type_request_resource = instances_set_machine_type_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_machine_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_metadata_unary(self, + request: Optional[Union[compute.SetMetadataInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + metadata_resource: Optional[compute.Metadata] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets metadata for the specified instance to the data + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_metadata(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMetadataInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetMetadataInstanceRequest, dict]): + The request object. A request message for + Instances.SetMetadata. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance (str):
+ Name of the instance scoping this
+ request.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ metadata_resource (google.cloud.compute_v1.types.Metadata):
+ The body resource for this request
+ This corresponds to the ``metadata_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ The raw long-running operation
+ response, returned without polling.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, zone, instance, metadata_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetMetadataInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetMetadataInstanceRequest):
+ request = compute.SetMetadataInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if metadata_resource is not None: + request.metadata_resource = metadata_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_metadata] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_metadata(self, + request: Optional[Union[compute.SetMetadataInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + metadata_resource: Optional[compute.Metadata] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets metadata for the specified instance to the data + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_metadata(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMetadataInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetMetadataInstanceRequest, dict]): + The request object. A request message for + Instances.SetMetadata. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_resource (google.cloud.compute_v1.types.Metadata): + The body resource for this request + This corresponds to the ``metadata_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, metadata_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetMetadataInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetMetadataInstanceRequest): + request = compute.SetMetadataInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if metadata_resource is not None: + request.metadata_resource = metadata_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_metadata] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_min_cpu_platform_unary(self, + request: Optional[Union[compute.SetMinCpuPlatformInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_min_cpu_platform_request_resource: Optional[compute.InstancesSetMinCpuPlatformRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the minimum CPU platform that this instance + should use. This method can only be called on a stopped + instance. For more information, read Specifying a + Minimum CPU Platform. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_min_cpu_platform(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMinCpuPlatformInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_min_cpu_platform(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetMinCpuPlatformInstanceRequest, dict]): + The request object. A request message for + Instances.SetMinCpuPlatform. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_min_cpu_platform_request_resource (google.cloud.compute_v1.types.InstancesSetMinCpuPlatformRequest): + The body resource for this request + This corresponds to the ``instances_set_min_cpu_platform_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_min_cpu_platform_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetMinCpuPlatformInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetMinCpuPlatformInstanceRequest): + request = compute.SetMinCpuPlatformInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_min_cpu_platform_request_resource is not None: + request.instances_set_min_cpu_platform_request_resource = instances_set_min_cpu_platform_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_min_cpu_platform] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_min_cpu_platform(self, + request: Optional[Union[compute.SetMinCpuPlatformInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_min_cpu_platform_request_resource: Optional[compute.InstancesSetMinCpuPlatformRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the minimum CPU platform that this instance + should use. This method can only be called on a stopped + instance. For more information, read Specifying a + Minimum CPU Platform. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_min_cpu_platform(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMinCpuPlatformInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_min_cpu_platform(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetMinCpuPlatformInstanceRequest, dict]): + The request object. A request message for + Instances.SetMinCpuPlatform. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_min_cpu_platform_request_resource (google.cloud.compute_v1.types.InstancesSetMinCpuPlatformRequest): + The body resource for this request + This corresponds to the ``instances_set_min_cpu_platform_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_min_cpu_platform_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetMinCpuPlatformInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetMinCpuPlatformInstanceRequest): + request = compute.SetMinCpuPlatformInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_min_cpu_platform_request_resource is not None: + request.instances_set_min_cpu_platform_request_resource = instances_set_min_cpu_platform_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_min_cpu_platform] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_name_unary(self, + request: Optional[Union[compute.SetNameInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_name_request_resource: Optional[compute.InstancesSetNameRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets name of an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_name(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetNameInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_name(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetNameInstanceRequest, dict]): + The request object. A request message for + Instances.SetName. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_name_request_resource (google.cloud.compute_v1.types.InstancesSetNameRequest): + The body resource for this request + This corresponds to the ``instances_set_name_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_name_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetNameInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetNameInstanceRequest): + request = compute.SetNameInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_name_request_resource is not None: + request.instances_set_name_request_resource = instances_set_name_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_name] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_name(self, + request: Optional[Union[compute.SetNameInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_name_request_resource: Optional[compute.InstancesSetNameRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets name of an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_name(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetNameInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_name(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetNameInstanceRequest, dict]): + The request object. A request message for + Instances.SetName. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_name_request_resource (google.cloud.compute_v1.types.InstancesSetNameRequest): + The body resource for this request + This corresponds to the ``instances_set_name_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_name_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetNameInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetNameInstanceRequest): + request = compute.SetNameInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_name_request_resource is not None: + request.instances_set_name_request_resource = instances_set_name_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_name] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_scheduling_unary(self, + request: Optional[Union[compute.SetSchedulingInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + scheduling_resource: Optional[compute.Scheduling] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets an instance's scheduling options. You can only call this + method on a stopped instance, that is, a VM instance that is in + a ``TERMINATED`` state. See Instance Life Cycle for more + information on the possible instance states. For more + information about setting scheduling options for a VM, see Set + VM host maintenance policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_scheduling(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSchedulingInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_scheduling(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSchedulingInstanceRequest, dict]): + The request object. A request message for + Instances.SetScheduling. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + scheduling_resource (google.cloud.compute_v1.types.Scheduling): + The body resource for this request + This corresponds to the ``scheduling_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, scheduling_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSchedulingInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSchedulingInstanceRequest): + request = compute.SetSchedulingInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if scheduling_resource is not None: + request.scheduling_resource = scheduling_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_scheduling] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_scheduling(self, + request: Optional[Union[compute.SetSchedulingInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + scheduling_resource: Optional[compute.Scheduling] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets an instance's scheduling options. You can only call this + method on a stopped instance, that is, a VM instance that is in + a ``TERMINATED`` state. See Instance Life Cycle for more + information on the possible instance states. For more + information about setting scheduling options for a VM, see Set + VM host maintenance policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_scheduling(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSchedulingInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_scheduling(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSchedulingInstanceRequest, dict]): + The request object. A request message for + Instances.SetScheduling. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + scheduling_resource (google.cloud.compute_v1.types.Scheduling): + The body resource for this request + This corresponds to the ``scheduling_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, scheduling_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSchedulingInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSchedulingInstanceRequest): + request = compute.SetSchedulingInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if scheduling_resource is not None: + request.scheduling_resource = scheduling_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_scheduling] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_service_account_unary(self, + request: Optional[Union[compute.SetServiceAccountInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_service_account_request_resource: Optional[compute.InstancesSetServiceAccountRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the service account on the instance. For more + information, read Changing the service account and + access scopes for an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_service_account(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetServiceAccountInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_service_account(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetServiceAccountInstanceRequest, dict]): + The request object. A request message for + Instances.SetServiceAccount. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + start. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_service_account_request_resource (google.cloud.compute_v1.types.InstancesSetServiceAccountRequest): + The body resource for this request + This corresponds to the ``instances_set_service_account_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_service_account_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetServiceAccountInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetServiceAccountInstanceRequest): + request = compute.SetServiceAccountInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_service_account_request_resource is not None: + request.instances_set_service_account_request_resource = instances_set_service_account_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_service_account] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_service_account(self, + request: Optional[Union[compute.SetServiceAccountInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instances_set_service_account_request_resource: Optional[compute.InstancesSetServiceAccountRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the service account on the instance. For more + information, read Changing the service account and + access scopes for an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_service_account(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetServiceAccountInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_service_account(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetServiceAccountInstanceRequest, dict]): + The request object. A request message for + Instances.SetServiceAccount. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + start. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances_set_service_account_request_resource (google.cloud.compute_v1.types.InstancesSetServiceAccountRequest): + The body resource for this request + This corresponds to the ``instances_set_service_account_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instances_set_service_account_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetServiceAccountInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetServiceAccountInstanceRequest): + request = compute.SetServiceAccountInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instances_set_service_account_request_resource is not None: + request.instances_set_service_account_request_resource = instances_set_service_account_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_service_account] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_shielded_instance_integrity_policy_unary(self, + request: Optional[Union[compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + shielded_instance_integrity_policy_resource: Optional[compute.ShieldedInstanceIntegrityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the Shielded Instance integrity policy for an + instance. You can only use this method on a running + instance. This method supports PATCH semantics and uses + the JSON merge patch format and processing rules. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_shielded_instance_integrity_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetShieldedInstanceIntegrityPolicyInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_shielded_instance_integrity_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetShieldedInstanceIntegrityPolicyInstanceRequest, dict]): + The request object. A request message for + Instances.SetShieldedInstanceIntegrityPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name or id of the instance scoping + this request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ shielded_instance_integrity_policy_resource (google.cloud.compute_v1.types.ShieldedInstanceIntegrityPolicy): + The body resource for this request + This corresponds to the ``shielded_instance_integrity_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, shielded_instance_integrity_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetShieldedInstanceIntegrityPolicyInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetShieldedInstanceIntegrityPolicyInstanceRequest): + request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if shielded_instance_integrity_policy_resource is not None: + request.shielded_instance_integrity_policy_resource = shielded_instance_integrity_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_shielded_instance_integrity_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_shielded_instance_integrity_policy(self, + request: Optional[Union[compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + shielded_instance_integrity_policy_resource: Optional[compute.ShieldedInstanceIntegrityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the Shielded Instance integrity policy for an + instance. You can only use this method on a running + instance. This method supports PATCH semantics and uses + the JSON merge patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_shielded_instance_integrity_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetShieldedInstanceIntegrityPolicyInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_shielded_instance_integrity_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetShieldedInstanceIntegrityPolicyInstanceRequest, dict]): + The request object. A request message for + Instances.SetShieldedInstanceIntegrityPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name or id of the instance scoping + this request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + shielded_instance_integrity_policy_resource (google.cloud.compute_v1.types.ShieldedInstanceIntegrityPolicy): + The body resource for this request + This corresponds to the ``shielded_instance_integrity_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, shielded_instance_integrity_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetShieldedInstanceIntegrityPolicyInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetShieldedInstanceIntegrityPolicyInstanceRequest): + request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if shielded_instance_integrity_policy_resource is not None: + request.shielded_instance_integrity_policy_resource = shielded_instance_integrity_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_shielded_instance_integrity_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_tags_unary(self, + request: Optional[Union[compute.SetTagsInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + tags_resource: Optional[compute.Tags] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets network tags for the specified instance to the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_tags(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetTagsInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_tags(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTagsInstanceRequest, dict]): + The request object. A request message for + Instances.SetTags. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tags_resource (google.cloud.compute_v1.types.Tags): + The body resource for this request + This corresponds to the ``tags_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, tags_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetTagsInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetTagsInstanceRequest): + request = compute.SetTagsInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if tags_resource is not None: + request.tags_resource = tags_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_tags(self, + request: Optional[Union[compute.SetTagsInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + tags_resource: Optional[compute.Tags] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets network tags for the specified instance to the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_tags(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetTagsInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_tags(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTagsInstanceRequest, dict]): + The request object. A request message for + Instances.SetTags. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tags_resource (google.cloud.compute_v1.types.Tags): + The body resource for this request + This corresponds to the ``tags_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, tags_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetTagsInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetTagsInstanceRequest): + request = compute.SetTagsInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if tags_resource is not None: + request.tags_resource = tags_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
        # NOTE(review): tail of `set_tags`, whose definition begins above this
        # chunk; code reproduced unchanged.
        rpc = self._transport._wrapped_methods[self._transport.set_tags]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def simulate_maintenance_event_unary(self,
            request: Optional[Union[compute.SimulateMaintenanceEventInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Simulates a host maintenance event on a VM. For more
        information, see Simulate a host maintenance event.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_simulate_maintenance_event():
                # Create a client
                client = compute_v1.InstancesClient()

                # Initialize request argument(s)
                request = compute_v1.SimulateMaintenanceEventInstanceRequest(
                    instance="instance_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.simulate_maintenance_event(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SimulateMaintenanceEventInstanceRequest, dict]):
                The request object. A request message for
                Instances.SimulateMaintenanceEvent. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance (str):
                Name of the instance scoping this
                request.

                This corresponds to the ``instance`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation resource. Unlike
                :meth:`simulate_maintenance_event`, this ``_unary`` variant
                returns the operation as-is: it is not wrapped in an
                ``ExtendedOperation`` and no completion polling is attached.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SimulateMaintenanceEventInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SimulateMaintenanceEventInstanceRequest):
            request = compute.SimulateMaintenanceEventInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance is not None:
            request.instance = instance

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.simulate_maintenance_event]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def simulate_maintenance_event(self,
            request: Optional[Union[compute.SimulateMaintenanceEventInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Simulates a host maintenance event on a VM. For more
        information, see Simulate a host maintenance event.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_simulate_maintenance_event():
                # Create a client
                client = compute_v1.InstancesClient()

                # Initialize request argument(s)
                request = compute_v1.SimulateMaintenanceEventInstanceRequest(
                    instance="instance_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.simulate_maintenance_event(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SimulateMaintenanceEventInstanceRequest, dict]):
                The request object. A request message for
                Instances.SimulateMaintenanceEvent. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance (str):
                Name of the instance scoping this
                request.

                This corresponds to the ``instance`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SimulateMaintenanceEventInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SimulateMaintenanceEventInstanceRequest):
            request = compute.SimulateMaintenanceEventInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance is not None:
            request.instance = instance

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.simulate_maintenance_event]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def start_unary(self,
            request: Optional[Union[compute.StartInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Starts an instance that was stopped using the
        instances().stop method.
        For more information, see
        Restart an instance.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_start():
                # Create a client
                client = compute_v1.InstancesClient()

                # Initialize request argument(s)
                request = compute_v1.StartInstanceRequest(
                    instance="instance_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.start(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.StartInstanceRequest, dict]):
                The request object. A request message for
                Instances.Start. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance (str):
                Name of the instance resource to
                start.

                This corresponds to the ``instance`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation resource. Unlike :meth:`start`,
                this ``_unary`` variant returns the operation as-is: it is not
                wrapped in an ``ExtendedOperation`` and no completion polling
                is attached.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.StartInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.StartInstanceRequest):
            request = compute.StartInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance is not None:
            request.instance = instance

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.start]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def start(self,
            request: Optional[Union[compute.StartInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Starts an instance that was stopped using the
        instances().stop method. For more information, see
        Restart an instance.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_start():
                # Create a client
                client = compute_v1.InstancesClient()

                # Initialize request argument(s)
                request = compute_v1.StartInstanceRequest(
                    instance="instance_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.start(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.StartInstanceRequest, dict]):
                The request object. A request message for
                Instances.Start. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance (str):
                Name of the instance resource to
                start.

                This corresponds to the ``instance`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.StartInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.StartInstanceRequest):
            request = compute.StartInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance is not None:
            request.instance = instance

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.start]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def start_with_encryption_key_unary(self,
            request: Optional[Union[compute.StartWithEncryptionKeyInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            instances_start_with_encryption_key_request_resource: Optional[compute.InstancesStartWithEncryptionKeyRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Starts an instance that was stopped using the
        instances().stop method. For more information, see
        Restart an instance.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_start_with_encryption_key():
                # Create a client
                client = compute_v1.InstancesClient()

                # Initialize request argument(s)
                request = compute_v1.StartWithEncryptionKeyInstanceRequest(
                    instance="instance_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.start_with_encryption_key(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.StartWithEncryptionKeyInstanceRequest, dict]):
                The request object. A request message for
                Instances.StartWithEncryptionKey. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance (str):
                Name of the instance resource to
                start.

                This corresponds to the ``instance`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instances_start_with_encryption_key_request_resource (google.cloud.compute_v1.types.InstancesStartWithEncryptionKeyRequest):
                The body resource for this request
                This corresponds to the ``instances_start_with_encryption_key_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation resource. Unlike
                :meth:`start_with_encryption_key`, this ``_unary`` variant
                returns the operation as-is: it is not wrapped in an
                ``ExtendedOperation`` and no completion polling is attached.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance, instances_start_with_encryption_key_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.StartWithEncryptionKeyInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.StartWithEncryptionKeyInstanceRequest):
            request = compute.StartWithEncryptionKeyInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance is not None:
            request.instance = instance
        if instances_start_with_encryption_key_request_resource is not None:
            request.instances_start_with_encryption_key_request_resource = instances_start_with_encryption_key_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.start_with_encryption_key]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def start_with_encryption_key(self,
            request: Optional[Union[compute.StartWithEncryptionKeyInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            instances_start_with_encryption_key_request_resource: Optional[compute.InstancesStartWithEncryptionKeyRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Starts an instance that was stopped using the
        instances().stop method. For more information, see
        Restart an instance.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_start_with_encryption_key():
                # Create a client
                client = compute_v1.InstancesClient()

                # Initialize request argument(s)
                request = compute_v1.StartWithEncryptionKeyInstanceRequest(
                    instance="instance_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.start_with_encryption_key(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.StartWithEncryptionKeyInstanceRequest, dict]):
                The request object. A request message for
                Instances.StartWithEncryptionKey. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance (str):
                Name of the instance resource to
                start.

                This corresponds to the ``instance`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instances_start_with_encryption_key_request_resource (google.cloud.compute_v1.types.InstancesStartWithEncryptionKeyRequest):
                The body resource for this request
                This corresponds to the ``instances_start_with_encryption_key_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance, instances_start_with_encryption_key_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.StartWithEncryptionKeyInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.StartWithEncryptionKeyInstanceRequest):
            request = compute.StartWithEncryptionKeyInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance is not None:
            request.instance = instance
        if instances_start_with_encryption_key_request_resource is not None:
            request.instances_start_with_encryption_key_request_resource = instances_start_with_encryption_key_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.start_with_encryption_key]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def stop_unary(self,
            request: Optional[Union[compute.StopInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Stops a running instance, shutting it down cleanly,
        and allows you to restart the instance at a later time.
        Stopped instances do not incur VM usage charges while
        they are stopped. However, resources that the VM is
        using, such as persistent disks and static IP addresses,
        will continue to be charged until they are deleted. For
        more information, see Stopping an instance.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_stop():
                # Create a client
                client = compute_v1.InstancesClient()

                # Initialize request argument(s)
                request = compute_v1.StopInstanceRequest(
                    instance="instance_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.stop(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.StopInstanceRequest, dict]):
                The request object. A request message for Instances.Stop.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance (str):
                Name of the instance resource to
                stop.

                This corresponds to the ``instance`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation resource. Unlike :meth:`stop`,
                this ``_unary`` variant returns the operation as-is: it is not
                wrapped in an ``ExtendedOperation`` and no completion polling
                is attached.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.StopInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.StopInstanceRequest):
            request = compute.StopInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance is not None:
            request.instance = instance

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.stop]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def stop(self,
            request: Optional[Union[compute.StopInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Stops a running instance, shutting it down cleanly,
        and allows you to restart the instance at a later time.
        Stopped instances do not incur VM usage charges while
        they are stopped. However, resources that the VM is
        using, such as persistent disks and static IP addresses,
        will continue to be charged until they are deleted. For
        more information, see Stopping an instance.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_stop():
                # Create a client
                client = compute_v1.InstancesClient()

                # Initialize request argument(s)
                request = compute_v1.StopInstanceRequest(
                    instance="instance_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.stop(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.StopInstanceRequest, dict]):
                The request object. A request message for Instances.Stop.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance (str):
                Name of the instance resource to
                stop.

                This corresponds to the ``instance`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, instance])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.StopInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.StopInstanceRequest):
            request = compute.StopInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance is not None:
            request.instance = instance

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.stop]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
                ("instance", request.instance),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    # NOTE(review): `suspend_unary` continues past this chunk; head
    # reproduced unchanged.
    def suspend_unary(self,
            request: Optional[Union[compute.SuspendInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            instance: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""This method suspends a running instance, saving its
        state to persistent storage, and allows you to resume
        the instance at a later time.
Suspended instances have + no compute costs (cores or RAM), and incur only storage + charges for the saved VM memory and localSSD data. Any + charged resources the virtual machine was using, such as + persistent disks and static IP addresses, will continue + to be charged while the instance is suspended. For more + information, see Suspending and resuming an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_suspend(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SuspendInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.suspend(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SuspendInstanceRequest, dict]): + The request object. A request message for + Instances.Suspend. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + suspend. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SuspendInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SuspendInstanceRequest): + request = compute.SuspendInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.suspend] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def suspend(self, + request: Optional[Union[compute.SuspendInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""This method suspends a running instance, saving its + state to persistent storage, and allows you to resume + the instance at a later time. Suspended instances have + no compute costs (cores or RAM), and incur only storage + charges for the saved VM memory and localSSD data. Any + charged resources the virtual machine was using, such as + persistent disks and static IP addresses, will continue + to be charged while the instance is suspended. For more + information, see Suspending and resuming an instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_suspend(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SuspendInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.suspend(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SuspendInstanceRequest, dict]): + The request object. A request message for + Instances.Suspend. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + suspend. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SuspendInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SuspendInstanceRequest): + request = compute.SuspendInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.suspend] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsInstanceRequest, dict]): + The request object. A request message for + Instances.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsInstanceRequest): + request = compute.TestIamPermissionsInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instance_resource: Optional[compute.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates an instance only if the necessary resources + are available. This method can update only a specific + set of instance properties. See Updating a running + instance for a list of updatable instance properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateInstanceRequest, dict]): + The request object. A request message for + Instances.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + update. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_resource (google.cloud.compute_v1.types.Instance): + The body resource for this request + This corresponds to the ``instance_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instance_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateInstanceRequest): + request = compute.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instance_resource is not None: + request.instance_resource = instance_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update(self, + request: Optional[Union[compute.UpdateInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + instance_resource: Optional[compute.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates an instance only if the necessary resources + are available. This method can update only a specific + set of instance properties. See Updating a running + instance for a list of updatable instance properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateInstanceRequest, dict]): + The request object. A request message for + Instances.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance resource to + update. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_resource (google.cloud.compute_v1.types.Instance): + The body resource for this request + This corresponds to the ``instance_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, instance_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateInstanceRequest): + request = compute.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if instance_resource is not None: + request.instance_resource = instance_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def update_access_config_unary(self, + request: Optional[Union[compute.UpdateAccessConfigInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + network_interface: Optional[str] = None, + access_config_resource: Optional[compute.AccessConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified access config from an + instance's network interface with the data included in + the request. This method supports PATCH semantics and + uses the JSON merge patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateAccessConfigInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_access_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateAccessConfigInstanceRequest, dict]): + The request object. A request message for + Instances.UpdateAccessConfig. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface + where the access config is attached. + + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_config_resource (google.cloud.compute_v1.types.AccessConfig): + The body resource for this request + This corresponds to the ``access_config_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, network_interface, access_config_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateAccessConfigInstanceRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateAccessConfigInstanceRequest): + request = compute.UpdateAccessConfigInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if network_interface is not None: + request.network_interface = network_interface + if access_config_resource is not None: + request.access_config_resource = access_config_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_access_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_access_config(self, + request: Optional[Union[compute.UpdateAccessConfigInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + network_interface: Optional[str] = None, + access_config_resource: Optional[compute.AccessConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified access config from an + instance's network interface with the data included in + the request. 
This method supports PATCH semantics and + uses the JSON merge patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateAccessConfigInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_access_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateAccessConfigInstanceRequest, dict]): + The request object. A request message for + Instances.UpdateAccessConfig. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface + where the access config is attached. 
+ + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_config_resource (google.cloud.compute_v1.types.AccessConfig): + The body resource for this request + This corresponds to the ``access_config_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, network_interface, access_config_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateAccessConfigInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateAccessConfigInstanceRequest): + request = compute.UpdateAccessConfigInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if network_interface is not None: + request.network_interface = network_interface + if access_config_resource is not None: + request.access_config_resource = access_config_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_access_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def update_display_device_unary(self, + request: Optional[Union[compute.UpdateDisplayDeviceInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + display_device_resource: Optional[compute.DisplayDevice] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the Display config for a VM instance. You can + only use this method on a stopped VM instance. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_display_device(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateDisplayDeviceInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_display_device(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateDisplayDeviceInstanceRequest, dict]): + The request object. A request message for + Instances.UpdateDisplayDevice. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + display_device_resource (google.cloud.compute_v1.types.DisplayDevice): + The body resource for this request + This corresponds to the ``display_device_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, display_device_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateDisplayDeviceInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateDisplayDeviceInstanceRequest): + request = compute.UpdateDisplayDeviceInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if display_device_resource is not None: + request.display_device_resource = display_device_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_display_device] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_display_device(self, + request: Optional[Union[compute.UpdateDisplayDeviceInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + display_device_resource: Optional[compute.DisplayDevice] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the Display config for a VM instance. You can + only use this method on a stopped VM instance. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_display_device(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateDisplayDeviceInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_display_device(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateDisplayDeviceInstanceRequest, dict]): + The request object. A request message for + Instances.UpdateDisplayDevice. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name of the instance scoping this + request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + display_device_resource (google.cloud.compute_v1.types.DisplayDevice): + The body resource for this request + This corresponds to the ``display_device_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, display_device_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateDisplayDeviceInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateDisplayDeviceInstanceRequest): + request = compute.UpdateDisplayDeviceInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if display_device_resource is not None: + request.display_device_resource = display_device_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_display_device] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def update_network_interface_unary(self, + request: Optional[Union[compute.UpdateNetworkInterfaceInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + network_interface: Optional[str] = None, + network_interface_resource: Optional[compute.NetworkInterface] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates an instance's network interface. This method + can only update an interface's alias IP range and + attached network. See Modifying alias IP ranges for an + existing instance for instructions on changing alias IP + ranges. 
See Migrating a VM between networks for + instructions on migrating an interface. This method + follows PATCH semantics. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_network_interface(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateNetworkInterfaceInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_network_interface(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateNetworkInterfaceInstanceRequest, dict]): + The request object. A request message for + Instances.UpdateNetworkInterface. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface to + update. 
+ + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface_resource (google.cloud.compute_v1.types.NetworkInterface): + The body resource for this request + This corresponds to the ``network_interface_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, network_interface, network_interface_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateNetworkInterfaceInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateNetworkInterfaceInstanceRequest): + request = compute.UpdateNetworkInterfaceInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if network_interface is not None: + request.network_interface = network_interface + if network_interface_resource is not None: + request.network_interface_resource = network_interface_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_network_interface] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_network_interface(self, + request: Optional[Union[compute.UpdateNetworkInterfaceInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + network_interface: Optional[str] = None, + network_interface_resource: Optional[compute.NetworkInterface] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates an instance's network interface. This method + can only update an interface's alias IP range and + attached network. See Modifying alias IP ranges for an + existing instance for instructions on changing alias IP + ranges. See Migrating a VM between networks for + instructions on migrating an interface. This method + follows PATCH semantics. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_network_interface(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateNetworkInterfaceInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_network_interface(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateNetworkInterfaceInstanceRequest, dict]): + The request object. A request message for + Instances.UpdateNetworkInterface. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + The instance name for this request. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_interface (str): + The name of the network interface to + update. + + This corresponds to the ``network_interface`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ network_interface_resource (google.cloud.compute_v1.types.NetworkInterface): + The body resource for this request + This corresponds to the ``network_interface_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, network_interface, network_interface_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateNetworkInterfaceInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateNetworkInterfaceInstanceRequest): + request = compute.UpdateNetworkInterfaceInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if network_interface is not None: + request.network_interface = network_interface + if network_interface_resource is not None: + request.network_interface_resource = network_interface_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_network_interface] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def update_shielded_instance_config_unary(self, + request: Optional[Union[compute.UpdateShieldedInstanceConfigInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + shielded_instance_config_resource: Optional[compute.ShieldedInstanceConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the Shielded Instance config for an instance. + You can only use this method on a stopped instance. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_shielded_instance_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateShieldedInstanceConfigInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_shielded_instance_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateShieldedInstanceConfigInstanceRequest, dict]): + The request object. A request message for + Instances.UpdateShieldedInstanceConfig. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name or id of the instance scoping + this request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + shielded_instance_config_resource (google.cloud.compute_v1.types.ShieldedInstanceConfig): + The body resource for this request + This corresponds to the ``shielded_instance_config_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, shielded_instance_config_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateShieldedInstanceConfigInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.UpdateShieldedInstanceConfigInstanceRequest): + request = compute.UpdateShieldedInstanceConfigInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if shielded_instance_config_resource is not None: + request.shielded_instance_config_resource = shielded_instance_config_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_shielded_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_shielded_instance_config(self, + request: Optional[Union[compute.UpdateShieldedInstanceConfigInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance: Optional[str] = None, + shielded_instance_config_resource: Optional[compute.ShieldedInstanceConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the Shielded Instance config for an instance. + You can only use this method on a stopped instance. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_shielded_instance_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateShieldedInstanceConfigInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_shielded_instance_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateShieldedInstanceConfigInstanceRequest, dict]): + The request object. A request message for + Instances.UpdateShieldedInstanceConfig. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (str): + Name or id of the instance scoping + this request. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + shielded_instance_config_resource (google.cloud.compute_v1.types.ShieldedInstanceConfig): + The body resource for this request + This corresponds to the ``shielded_instance_config_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance, shielded_instance_config_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateShieldedInstanceConfigInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateShieldedInstanceConfigInstanceRequest): + request = compute.UpdateShieldedInstanceConfigInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance is not None: + request.instance = instance + if shielded_instance_config_resource is not None: + request.shielded_instance_config_resource = shielded_instance_config_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_shielded_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("instance", request.instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "InstancesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "InstancesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/pagers.py new file mode 100644 index 000000000..347b4265b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/pagers.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.InstanceAggregatedList], + request: compute.AggregatedListInstancesRequest, + response: compute.InstanceAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListInstancesRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.InstancesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.InstancesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.compute_v1.types.InstanceList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceList], + request: compute.ListInstancesRequest, + response: compute.InstanceList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Instance]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListReferrersPager: + """A pager for iterating through ``list_referrers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceListReferrers` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListReferrers`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceListReferrers` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceListReferrers], + request: compute.ListReferrersInstancesRequest, + response: compute.InstanceListReferrers, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListReferrersInstancesRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceListReferrers): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListReferrersInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceListReferrers]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Reference]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/__init__.py new file mode 100644 index 000000000..ecd2733ed --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstancesTransport +from .rest import InstancesRestTransport +from .rest import InstancesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[InstancesTransport]] +_transport_registry['rest'] = InstancesRestTransport + +__all__ = ( + 'InstancesTransport', + 'InstancesRestTransport', + 'InstancesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/base.py new file mode 100644 index 000000000..1e4280d6d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/base.py @@ -0,0 +1,793 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import zone_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class InstancesTransport(abc.ABC): + """Abstract transport class for Instances.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_access_config: gapic_v1.method.wrap_method( + self.add_access_config, + default_timeout=None, + client_info=client_info, + ), + self.add_resource_policies: gapic_v1.method.wrap_method( + self.add_resource_policies, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.attach_disk: gapic_v1.method.wrap_method( + self.attach_disk, + default_timeout=None, + client_info=client_info, + ), + self.bulk_insert: gapic_v1.method.wrap_method( + self.bulk_insert, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.delete_access_config: gapic_v1.method.wrap_method( + self.delete_access_config, + default_timeout=None, + client_info=client_info, + ), + self.detach_disk: gapic_v1.method.wrap_method( + self.detach_disk, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_effective_firewalls: gapic_v1.method.wrap_method( + self.get_effective_firewalls, + default_timeout=None, + client_info=client_info, + ), + self.get_guest_attributes: gapic_v1.method.wrap_method( + self.get_guest_attributes, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.get_screenshot: gapic_v1.method.wrap_method( + self.get_screenshot, + default_timeout=None, + client_info=client_info, + ), + self.get_serial_port_output: gapic_v1.method.wrap_method( + self.get_serial_port_output, + default_timeout=None, 
+ client_info=client_info, + ), + self.get_shielded_instance_identity: gapic_v1.method.wrap_method( + self.get_shielded_instance_identity, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_referrers: gapic_v1.method.wrap_method( + self.list_referrers, + default_timeout=None, + client_info=client_info, + ), + self.remove_resource_policies: gapic_v1.method.wrap_method( + self.remove_resource_policies, + default_timeout=None, + client_info=client_info, + ), + self.reset: gapic_v1.method.wrap_method( + self.reset, + default_timeout=None, + client_info=client_info, + ), + self.resume: gapic_v1.method.wrap_method( + self.resume, + default_timeout=None, + client_info=client_info, + ), + self.send_diagnostic_interrupt: gapic_v1.method.wrap_method( + self.send_diagnostic_interrupt, + default_timeout=None, + client_info=client_info, + ), + self.set_deletion_protection: gapic_v1.method.wrap_method( + self.set_deletion_protection, + default_timeout=None, + client_info=client_info, + ), + self.set_disk_auto_delete: gapic_v1.method.wrap_method( + self.set_disk_auto_delete, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + self.set_machine_resources: gapic_v1.method.wrap_method( + self.set_machine_resources, + default_timeout=None, + client_info=client_info, + ), + self.set_machine_type: gapic_v1.method.wrap_method( + self.set_machine_type, + default_timeout=None, + client_info=client_info, + ), + self.set_metadata: gapic_v1.method.wrap_method( + self.set_metadata, + 
default_timeout=None, + client_info=client_info, + ), + self.set_min_cpu_platform: gapic_v1.method.wrap_method( + self.set_min_cpu_platform, + default_timeout=None, + client_info=client_info, + ), + self.set_name: gapic_v1.method.wrap_method( + self.set_name, + default_timeout=None, + client_info=client_info, + ), + self.set_scheduling: gapic_v1.method.wrap_method( + self.set_scheduling, + default_timeout=None, + client_info=client_info, + ), + self.set_service_account: gapic_v1.method.wrap_method( + self.set_service_account, + default_timeout=None, + client_info=client_info, + ), + self.set_shielded_instance_integrity_policy: gapic_v1.method.wrap_method( + self.set_shielded_instance_integrity_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_tags: gapic_v1.method.wrap_method( + self.set_tags, + default_timeout=None, + client_info=client_info, + ), + self.simulate_maintenance_event: gapic_v1.method.wrap_method( + self.simulate_maintenance_event, + default_timeout=None, + client_info=client_info, + ), + self.start: gapic_v1.method.wrap_method( + self.start, + default_timeout=None, + client_info=client_info, + ), + self.start_with_encryption_key: gapic_v1.method.wrap_method( + self.start_with_encryption_key, + default_timeout=None, + client_info=client_info, + ), + self.stop: gapic_v1.method.wrap_method( + self.stop, + default_timeout=None, + client_info=client_info, + ), + self.suspend: gapic_v1.method.wrap_method( + self.suspend, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + self.update_access_config: gapic_v1.method.wrap_method( + self.update_access_config, + default_timeout=None, + client_info=client_info, + ), + self.update_display_device: gapic_v1.method.wrap_method( + 
self.update_display_device, + default_timeout=None, + client_info=client_info, + ), + self.update_network_interface: gapic_v1.method.wrap_method( + self.update_network_interface, + default_timeout=None, + client_info=client_info, + ), + self.update_shielded_instance_config: gapic_v1.method.wrap_method( + self.update_shielded_instance_config, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def add_access_config(self) -> Callable[ + [compute.AddAccessConfigInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def add_resource_policies(self) -> Callable[ + [compute.AddResourcePoliciesInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInstancesRequest], + Union[ + compute.InstanceAggregatedList, + Awaitable[compute.InstanceAggregatedList] + ]]: + raise NotImplementedError() + + @property + def attach_disk(self) -> Callable[ + [compute.AttachDiskInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def bulk_insert(self) -> Callable[ + [compute.BulkInsertInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_access_config(self) -> Callable[ + [compute.DeleteAccessConfigInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + 
]]: + raise NotImplementedError() + + @property + def detach_disk(self) -> Callable[ + [compute.DetachDiskInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetInstanceRequest], + Union[ + compute.Instance, + Awaitable[compute.Instance] + ]]: + raise NotImplementedError() + + @property + def get_effective_firewalls(self) -> Callable[ + [compute.GetEffectiveFirewallsInstanceRequest], + Union[ + compute.InstancesGetEffectiveFirewallsResponse, + Awaitable[compute.InstancesGetEffectiveFirewallsResponse] + ]]: + raise NotImplementedError() + + @property + def get_guest_attributes(self) -> Callable[ + [compute.GetGuestAttributesInstanceRequest], + Union[ + compute.GuestAttributes, + Awaitable[compute.GuestAttributes] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyInstanceRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def get_screenshot(self) -> Callable[ + [compute.GetScreenshotInstanceRequest], + Union[ + compute.Screenshot, + Awaitable[compute.Screenshot] + ]]: + raise NotImplementedError() + + @property + def get_serial_port_output(self) -> Callable[ + [compute.GetSerialPortOutputInstanceRequest], + Union[ + compute.SerialPortOutput, + Awaitable[compute.SerialPortOutput] + ]]: + raise NotImplementedError() + + @property + def get_shielded_instance_identity(self) -> Callable[ + [compute.GetShieldedInstanceIdentityInstanceRequest], + Union[ + compute.ShieldedInstanceIdentity, + Awaitable[compute.ShieldedInstanceIdentity] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListInstancesRequest], + Union[ + 
compute.InstanceList, + Awaitable[compute.InstanceList] + ]]: + raise NotImplementedError() + + @property + def list_referrers(self) -> Callable[ + [compute.ListReferrersInstancesRequest], + Union[ + compute.InstanceListReferrers, + Awaitable[compute.InstanceListReferrers] + ]]: + raise NotImplementedError() + + @property + def remove_resource_policies(self) -> Callable[ + [compute.RemoveResourcePoliciesInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def reset(self) -> Callable[ + [compute.ResetInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def resume(self) -> Callable[ + [compute.ResumeInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def send_diagnostic_interrupt(self) -> Callable[ + [compute.SendDiagnosticInterruptInstanceRequest], + Union[ + compute.SendDiagnosticInterruptInstanceResponse, + Awaitable[compute.SendDiagnosticInterruptInstanceResponse] + ]]: + raise NotImplementedError() + + @property + def set_deletion_protection(self) -> Callable[ + [compute.SetDeletionProtectionInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_disk_auto_delete(self) -> Callable[ + [compute.SetDiskAutoDeleteInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyInstanceRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_machine_resources(self) -> Callable[ + 
[compute.SetMachineResourcesInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_machine_type(self) -> Callable[ + [compute.SetMachineTypeInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_metadata(self) -> Callable[ + [compute.SetMetadataInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_min_cpu_platform(self) -> Callable[ + [compute.SetMinCpuPlatformInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_name(self) -> Callable[ + [compute.SetNameInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_scheduling(self) -> Callable[ + [compute.SetSchedulingInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_service_account(self) -> Callable[ + [compute.SetServiceAccountInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_shielded_instance_integrity_policy(self) -> Callable[ + [compute.SetShieldedInstanceIntegrityPolicyInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_tags(self) -> Callable[ + [compute.SetTagsInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def simulate_maintenance_event(self) -> Callable[ + [compute.SimulateMaintenanceEventInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def start(self) -> Callable[ + 
[compute.StartInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def start_with_encryption_key(self) -> Callable[ + [compute.StartWithEncryptionKeyInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def stop(self) -> Callable[ + [compute.StopInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def suspend(self) -> Callable[ + [compute.SuspendInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsInstanceRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update_access_config(self) -> Callable[ + [compute.UpdateAccessConfigInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update_display_device(self) -> Callable[ + [compute.UpdateDisplayDeviceInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update_network_interface(self) -> Callable[ + [compute.UpdateNetworkInterfaceInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update_shielded_instance_config(self) -> Callable[ + [compute.UpdateShieldedInstanceConfigInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError()
+
+    @property
+    def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient:
+        # Lazily create and memoize a ZoneOperationsClient in the shared
+        # _extended_operations_services cache, keyed by "zone_operations",
+        # reusing this transport's credentials. Presumably used to track
+        # zone-scoped extended operations — confirm against callers.
+        ex_op_service = self._extended_operations_services.get("zone_operations")
+        if not ex_op_service:
+            # `self.kind` names the concrete transport, so the operations
+            # client is constructed over the same wire protocol.
+            ex_op_service = zone_operations.ZoneOperationsClient(
+                credentials=self._credentials,
+                transport=self.kind,
+            )
+            self._extended_operations_services["zone_operations"] = ex_op_service
+
+        return ex_op_service
+
+
+__all__ = (
+    'InstancesTransport',
+)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/rest.py
new file mode 100644
index 000000000..79549f57f
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/instances/transports/rest.py
@@ -0,0 +1,6049 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+import json  # type: ignore
+import grpc  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from requests import __version__ as requests_version
+import dataclasses
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+# Compatibility shim: older google-api-core releases have no
+# `gapic_v1.method._MethodDefault`, so fall back to a plain `object`
+# sentinel in the retry type alias when the attribute is missing.
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+
+from google.cloud.compute_v1.types import compute
+
+from .base import InstancesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+
+# REST-flavored client info: keeps the generated gapic_version from the base
+# transport, reports the installed `requests` version as rest_version, and
+# sets grpc_version to None (this transport does not use gRPC for requests).
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=requests_version,
+)
+
+
+class InstancesRestInterceptor:
+    """Interceptor for Instances.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the InstancesRestTransport.
+
+    .. 
code-block:: python + class MyCustomInstancesInterceptor(InstancesRestInterceptor): + def pre_add_access_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_access_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_add_resource_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_resource_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_attach_disk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_attach_disk(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_bulk_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_bulk_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_access_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_access_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_detach_disk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_detach_disk(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, 
request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_effective_firewalls(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_effective_firewalls(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_guest_attributes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_guest_attributes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_screenshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_screenshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_serial_port_output(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_serial_port_output(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_shielded_instance_identity(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_shielded_instance_identity(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_referrers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_referrers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_resource_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_resource_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reset(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reset(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resume(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_resume(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_send_diagnostic_interrupt(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_send_diagnostic_interrupt(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_deletion_protection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_deletion_protection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_disk_auto_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_disk_auto_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_machine_resources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_machine_resources(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_machine_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_machine_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_metadata(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_min_cpu_platform(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_min_cpu_platform(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_name(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_name(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_scheduling(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_scheduling(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_service_account(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_service_account(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_set_shielded_instance_integrity_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_shielded_instance_integrity_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_tags(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_tags(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_simulate_maintenance_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_simulate_maintenance_event(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_with_encryption_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_with_encryption_key(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_suspend(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_suspend(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_access_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_access_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_display_device(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_display_device(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_network_interface(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_network_interface(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_shielded_instance_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_shielded_instance_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InstancesRestTransport(interceptor=MyCustomInstancesInterceptor()) + client = InstancesClient(transport=transport) + + + """ + def pre_add_access_config(self, request: compute.AddAccessConfigInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddAccessConfigInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_access_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_add_access_config(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_access_config + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. 
+ """ + return response + def pre_add_resource_policies(self, request: compute.AddResourcePoliciesInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddResourcePoliciesInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_resource_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_add_resource_policies(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_resource_policies + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_aggregated_list(self, request: compute.AggregatedListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.InstanceAggregatedList) -> compute.InstanceAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_attach_disk(self, request: compute.AttachDiskInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AttachDiskInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for attach_disk + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_attach_disk(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for attach_disk + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_bulk_insert(self, request: compute.BulkInsertInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.BulkInsertInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for bulk_insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_bulk_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for bulk_insert + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_delete_access_config(self, request: compute.DeleteAccessConfigInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteAccessConfigInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_access_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_delete_access_config(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete_access_config + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_detach_disk(self, request: compute.DetachDiskInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DetachDiskInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for detach_disk + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_detach_disk(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for detach_disk + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_get(self, response: compute.Instance) -> compute.Instance: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_get_effective_firewalls(self, request: compute.GetEffectiveFirewallsInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetEffectiveFirewallsInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_effective_firewalls + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_get_effective_firewalls(self, response: compute.InstancesGetEffectiveFirewallsResponse) -> compute.InstancesGetEffectiveFirewallsResponse: + """Post-rpc interceptor for get_effective_firewalls + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_get_guest_attributes(self, request: compute.GetGuestAttributesInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetGuestAttributesInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_guest_attributes + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_get_guest_attributes(self, response: compute.GuestAttributes) -> compute.GuestAttributes: + """Post-rpc interceptor for get_guest_attributes + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. 
+ """ + return response + def pre_get_screenshot(self, request: compute.GetScreenshotInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetScreenshotInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_screenshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_get_screenshot(self, response: compute.Screenshot) -> compute.Screenshot: + """Post-rpc interceptor for get_screenshot + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_get_serial_port_output(self, request: compute.GetSerialPortOutputInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetSerialPortOutputInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_serial_port_output + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_get_serial_port_output(self, response: compute.SerialPortOutput) -> compute.SerialPortOutput: + """Post-rpc interceptor for get_serial_port_output + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_get_shielded_instance_identity(self, request: compute.GetShieldedInstanceIdentityInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetShieldedInstanceIdentityInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_shielded_instance_identity + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_get_shielded_instance_identity(self, response: compute.ShieldedInstanceIdentity) -> compute.ShieldedInstanceIdentity: + """Post-rpc interceptor for get_shielded_instance_identity + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_list(self, response: compute.InstanceList) -> compute.InstanceList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_list_referrers(self, request: compute.ListReferrersInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListReferrersInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_referrers + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_list_referrers(self, response: compute.InstanceListReferrers) -> compute.InstanceListReferrers: + """Post-rpc interceptor for list_referrers + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_remove_resource_policies(self, request: compute.RemoveResourcePoliciesInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveResourcePoliciesInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_resource_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_remove_resource_policies(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_resource_policies + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_reset(self, request: compute.ResetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ResetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reset + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_reset(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for reset + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. 
+ """ + return response + def pre_resume(self, request: compute.ResumeInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ResumeInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resume + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_resume(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for resume + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_send_diagnostic_interrupt(self, request: compute.SendDiagnosticInterruptInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SendDiagnosticInterruptInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for send_diagnostic_interrupt + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_send_diagnostic_interrupt(self, response: compute.SendDiagnosticInterruptInstanceResponse) -> compute.SendDiagnosticInterruptInstanceResponse: + """Post-rpc interceptor for send_diagnostic_interrupt + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_deletion_protection(self, request: compute.SetDeletionProtectionInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetDeletionProtectionInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_deletion_protection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_set_deletion_protection(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_deletion_protection + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_disk_auto_delete(self, request: compute.SetDiskAutoDeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetDiskAutoDeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_disk_auto_delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_disk_auto_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_disk_auto_delete + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. 
+ """ + return response + def pre_set_labels(self, request: compute.SetLabelsInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_machine_resources(self, request: compute.SetMachineResourcesInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetMachineResourcesInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_machine_resources + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_machine_resources(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_machine_resources + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_machine_type(self, request: compute.SetMachineTypeInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetMachineTypeInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_machine_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_set_machine_type(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_machine_type + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_metadata(self, request: compute.SetMetadataInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetMetadataInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_metadata(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_metadata + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_min_cpu_platform(self, request: compute.SetMinCpuPlatformInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetMinCpuPlatformInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_min_cpu_platform + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_min_cpu_platform(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_min_cpu_platform + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. 
+ """ + return response + def pre_set_name(self, request: compute.SetNameInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetNameInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_name + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_name(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_name + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_scheduling(self, request: compute.SetSchedulingInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetSchedulingInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_scheduling + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_scheduling(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_scheduling + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_service_account(self, request: compute.SetServiceAccountInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetServiceAccountInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_service_account + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_set_service_account(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_service_account + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_shielded_instance_integrity_policy(self, request: compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_shielded_instance_integrity_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_shielded_instance_integrity_policy(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_shielded_instance_integrity_policy + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_set_tags(self, request: compute.SetTagsInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetTagsInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_tags + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_set_tags(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_tags + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. 
+ """ + return response + def pre_simulate_maintenance_event(self, request: compute.SimulateMaintenanceEventInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SimulateMaintenanceEventInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for simulate_maintenance_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_simulate_maintenance_event(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for simulate_maintenance_event + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_start(self, request: compute.StartInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StartInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_start(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for start + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_start_with_encryption_key(self, request: compute.StartWithEncryptionKeyInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StartWithEncryptionKeyInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_with_encryption_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_start_with_encryption_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for start_with_encryption_key + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_stop(self, request: compute.StopInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StopInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_stop(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for stop + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_suspend(self, request: compute.SuspendInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SuspendInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for suspend + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_suspend(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for suspend + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_update_access_config(self, request: compute.UpdateAccessConfigInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateAccessConfigInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_access_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_update_access_config(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_access_config + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. 
+ """ + return response + def pre_update_display_device(self, request: compute.UpdateDisplayDeviceInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateDisplayDeviceInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_display_device + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_update_display_device(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_display_device + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_update_network_interface(self, request: compute.UpdateNetworkInterfaceInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateNetworkInterfaceInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_network_interface + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. + """ + return request, metadata + + def post_update_network_interface(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_network_interface + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + def pre_update_shielded_instance_config(self, request: compute.UpdateShieldedInstanceConfigInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateShieldedInstanceConfigInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_shielded_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Instances server. 
+ """ + return request, metadata + + def post_update_shielded_instance_config(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_shielded_instance_config + + Override in a subclass to manipulate the response + after it is returned by the Instances server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InstancesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstancesRestInterceptor + + +class InstancesRestTransport(InstancesTransport): + """REST backend transport for Instances. + + The Instances API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InstancesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InstancesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddAccessConfig(InstancesRestStub): + def __hash__(self): + return hash("AddAccessConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "networkInterface" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddAccessConfigInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add access config method over HTTP. + + Args: + request (~.compute.AddAccessConfigInstanceRequest): + The request object. A request message for + Instances.AddAccessConfig. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig', + 'body': 'access_config_resource', + }, + ] + request, metadata = self._interceptor.pre_add_access_config(request, metadata) + pb_request = compute.AddAccessConfigInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_access_config(resp) + return resp + + class _AddResourcePolicies(InstancesRestStub): + def __hash__(self): + return hash("AddResourcePolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddResourcePoliciesInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add resource policies method over HTTP. + + Args: + request (~.compute.AddResourcePoliciesInstanceRequest): + The request object. A request message for + Instances.AddResourcePolicies. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addResourcePolicies', + 'body': 'instances_add_resource_policies_request_resource', + }, + ] + request, metadata = self._interceptor.pre_add_resource_policies(request, metadata) + pb_request = compute.AddResourcePoliciesInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_resource_policies(resp) + return resp + + class _AggregatedList(InstancesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListInstancesRequest): + The request object. A request message for + Instances.AggregatedList. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstanceAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/instances', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceAggregatedList() + pb_resp = compute.InstanceAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _AttachDisk(InstancesRestStub): + def __hash__(self): + return hash("AttachDisk") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AttachDiskInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the attach disk method over HTTP. + + Args: + request (~.compute.AttachDiskInstanceRequest): + The request object. A request message for + Instances.AttachDisk. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/attachDisk', + 'body': 'attached_disk_resource', + }, + ] + request, metadata = self._interceptor.pre_attach_disk(request, metadata) + pb_request = compute.AttachDiskInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_attach_disk(resp) + return resp + + class _BulkInsert(InstancesRestStub): + def __hash__(self): + return hash("BulkInsert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.BulkInsertInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the bulk insert method over HTTP. + + Args: + request (~.compute.BulkInsertInstanceRequest): + The request object. A request message for + Instances.BulkInsert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/bulkInsert', + 'body': 'bulk_insert_instance_resource_resource', + }, + ] + request, metadata = self._interceptor.pre_bulk_insert(request, metadata) + pb_request = compute.BulkInsertInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_bulk_insert(resp) + return resp + + class _Delete(InstancesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteInstanceRequest): + The request object. A request message for + Instances.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _DeleteAccessConfig(InstancesRestStub): + def __hash__(self): + return hash("DeleteAccessConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "accessConfig" : "", "networkInterface" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteAccessConfigInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete access config method over HTTP. + + Args: + request (~.compute.DeleteAccessConfigInstanceRequest): + The request object. A request message for + Instances.DeleteAccessConfig. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig', + }, + ] + request, metadata = self._interceptor.pre_delete_access_config(request, metadata) + pb_request = compute.DeleteAccessConfigInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_access_config(resp) + return resp + + class _DetachDisk(InstancesRestStub): + def __hash__(self): + return hash("DetachDisk") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "deviceName" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DetachDiskInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the detach disk method over HTTP. + + Args: + request (~.compute.DetachDiskInstanceRequest): + The request object. A request message for + Instances.DetachDisk. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/detachDisk', + }, + ] + request, metadata = self._interceptor.pre_detach_disk(request, metadata) + pb_request = compute.DetachDiskInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_detach_disk(resp) + return resp + + class _Get(InstancesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Instance: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInstanceRequest): + The request object. A request message for Instances.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Instance: + Represents an Instance resource. An + instance is a virtual machine that is + hosted on Google Cloud Platform. For + more information, read Virtual Machine + Instances. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Instance() + pb_resp = compute.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetEffectiveFirewalls(InstancesRestStub): + def __hash__(self): + return hash("GetEffectiveFirewalls") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "networkInterface" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetEffectiveFirewallsInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstancesGetEffectiveFirewallsResponse: + r"""Call the get effective firewalls method over HTTP. + + Args: + request (~.compute.GetEffectiveFirewallsInstanceRequest): + The request object. A request message for + Instances.GetEffectiveFirewalls. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.InstancesGetEffectiveFirewallsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getEffectiveFirewalls', + }, + ] + request, metadata = self._interceptor.pre_get_effective_firewalls(request, metadata) + pb_request = compute.GetEffectiveFirewallsInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstancesGetEffectiveFirewallsResponse() + pb_resp = compute.InstancesGetEffectiveFirewallsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_effective_firewalls(resp) + return resp + + class _GetGuestAttributes(InstancesRestStub): + def __hash__(self): + return hash("GetGuestAttributes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetGuestAttributesInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.GuestAttributes: + r"""Call the get guest attributes method over HTTP. + + Args: + request (~.compute.GetGuestAttributesInstanceRequest): + The request object. A request message for + Instances.GetGuestAttributes. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.GuestAttributes: + A guest attributes entry. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getGuestAttributes', + }, + ] + request, metadata = self._interceptor.pre_get_guest_attributes(request, metadata) + pb_request = compute.GetGuestAttributesInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.GuestAttributes() + pb_resp = compute.GuestAttributes.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_guest_attributes(resp) + return resp + + class _GetIamPolicy(InstancesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyInstanceRequest): + The request object. A request message for + Instances.GetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _GetScreenshot(InstancesRestStub): + def __hash__(self): + return hash("GetScreenshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetScreenshotInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Screenshot: + r"""Call the get screenshot method over HTTP. + + Args: + request (~.compute.GetScreenshotInstanceRequest): + The request object. A request message for + Instances.GetScreenshot. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Screenshot: + An instance's screenshot. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/screenshot', + }, + ] + request, metadata = self._interceptor.pre_get_screenshot(request, metadata) + pb_request = compute.GetScreenshotInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Screenshot() + pb_resp = compute.Screenshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_screenshot(resp) + return resp + + class _GetSerialPortOutput(InstancesRestStub): + def __hash__(self): + return hash("GetSerialPortOutput") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetSerialPortOutputInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SerialPortOutput: + r"""Call the get serial port output method over HTTP. + + Args: + request (~.compute.GetSerialPortOutputInstanceRequest): + The request object. A request message for + Instances.GetSerialPortOutput. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SerialPortOutput: + An instance serial console output. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/serialPort', + }, + ] + request, metadata = self._interceptor.pre_get_serial_port_output(request, metadata) + pb_request = compute.GetSerialPortOutputInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SerialPortOutput() + pb_resp = compute.SerialPortOutput.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_serial_port_output(resp) + return resp + + class _GetShieldedInstanceIdentity(InstancesRestStub): + def __hash__(self): + return hash("GetShieldedInstanceIdentity") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetShieldedInstanceIdentityInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ShieldedInstanceIdentity: + r"""Call the get shielded instance + identity method over HTTP. + + Args: + request (~.compute.GetShieldedInstanceIdentityInstanceRequest): + The request object. A request message for + Instances.GetShieldedInstanceIdentity. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ShieldedInstanceIdentity: + A Shielded Instance Identity. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getShieldedInstanceIdentity', + }, + ] + request, metadata = self._interceptor.pre_get_shielded_instance_identity(request, metadata) + pb_request = compute.GetShieldedInstanceIdentityInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ShieldedInstanceIdentity() + pb_resp = compute.ShieldedInstanceIdentity.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_shielded_instance_identity(resp) + return resp + + class _Insert(InstancesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertInstanceRequest): + The request object. A request message for + Instances.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances', + 'body': 'instance_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(InstancesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInstancesRequest): + The request object. A request message for Instances.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceList: + Contains a list of instances. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceList() + pb_resp = compute.InstanceList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListReferrers(InstancesRestStub): + def __hash__(self): + return hash("ListReferrers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListReferrersInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceListReferrers: + r"""Call the list referrers method over HTTP. + + Args: + request (~.compute.ListReferrersInstancesRequest): + The request object. A request message for + Instances.ListReferrers. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceListReferrers: + Contains a list of instance + referrers. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/referrers', + }, + ] + request, metadata = self._interceptor.pre_list_referrers(request, metadata) + pb_request = compute.ListReferrersInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceListReferrers() + pb_resp = compute.InstanceListReferrers.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_referrers(resp) + return resp + + class _RemoveResourcePolicies(InstancesRestStub): + def __hash__(self): + return hash("RemoveResourcePolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveResourcePoliciesInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove resource policies method over HTTP. + + Args: + request (~.compute.RemoveResourcePoliciesInstanceRequest): + The request object. A request message for + Instances.RemoveResourcePolicies. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/removeResourcePolicies', + 'body': 'instances_remove_resource_policies_request_resource', + }, + ] + request, metadata = self._interceptor.pre_remove_resource_policies(request, metadata) + pb_request = compute.RemoveResourcePoliciesInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_resource_policies(resp) + return resp + + class _Reset(InstancesRestStub): + def __hash__(self): + return hash("Reset") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ResetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the reset method over HTTP. + + Args: + request (~.compute.ResetInstanceRequest): + The request object. A request message for + Instances.Reset. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/reset', + }, + ] + request, metadata = self._interceptor.pre_reset(request, metadata) + pb_request = compute.ResetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reset(resp) + return resp + + class _Resume(InstancesRestStub): + def __hash__(self): + return hash("Resume") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ResumeInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the resume method over HTTP. + + Args: + request (~.compute.ResumeInstanceRequest): + The request object. A request message for + Instances.Resume. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/resume', + }, + ] + request, metadata = self._interceptor.pre_resume(request, metadata) + pb_request = compute.ResumeInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume(resp) + return resp + + class _SendDiagnosticInterrupt(InstancesRestStub): + def __hash__(self): + return hash("SendDiagnosticInterrupt") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SendDiagnosticInterruptInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SendDiagnosticInterruptInstanceResponse: + r"""Call the send diagnostic interrupt method over HTTP. + + Args: + request (~.compute.SendDiagnosticInterruptInstanceRequest): + The request object. A request message for + Instances.SendDiagnosticInterrupt. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SendDiagnosticInterruptInstanceResponse: + A response message for + Instances.SendDiagnosticInterrupt. See + the method description for details. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/sendDiagnosticInterrupt', + }, + ] + request, metadata = self._interceptor.pre_send_diagnostic_interrupt(request, metadata) + pb_request = compute.SendDiagnosticInterruptInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SendDiagnosticInterruptInstanceResponse() + pb_resp = compute.SendDiagnosticInterruptInstanceResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_send_diagnostic_interrupt(resp) + return resp + + class _SetDeletionProtection(InstancesRestStub): + def __hash__(self): + return hash("SetDeletionProtection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetDeletionProtectionInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set deletion protection method over HTTP. + + Args: + request (~.compute.SetDeletionProtectionInstanceRequest): + The request object. A request message for + Instances.SetDeletionProtection. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection', + }, + ] + request, metadata = self._interceptor.pre_set_deletion_protection(request, metadata) + pb_request = compute.SetDeletionProtectionInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_deletion_protection(resp) + return resp + + class _SetDiskAutoDelete(InstancesRestStub): + def __hash__(self): + return hash("SetDiskAutoDelete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "autoDelete" : False, "deviceName" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetDiskAutoDeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set disk auto delete method over HTTP. + + Args: + request (~.compute.SetDiskAutoDeleteInstanceRequest): + The request object. A request message for + Instances.SetDiskAutoDelete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete', + }, + ] + request, metadata = self._interceptor.pre_set_disk_auto_delete(request, metadata) + pb_request = compute.SetDiskAutoDeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_disk_auto_delete(resp) + return resp + + class _SetIamPolicy(InstancesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyInstanceRequest): + The request object. A request message for + Instances.SetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setIamPolicy', + 'body': 'zone_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _SetLabels(InstancesRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsInstanceRequest): + The request object. A request message for + Instances.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setLabels', + 'body': 'instances_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _SetMachineResources(InstancesRestStub): + def __hash__(self): + return hash("SetMachineResources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetMachineResourcesInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set machine resources method over HTTP. + + Args: + request (~.compute.SetMachineResourcesInstanceRequest): + The request object. A request message for + Instances.SetMachineResources. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineResources', + 'body': 'instances_set_machine_resources_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_machine_resources(request, metadata) + pb_request = compute.SetMachineResourcesInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_machine_resources(resp) + return resp + + class _SetMachineType(InstancesRestStub): + def __hash__(self): + return hash("SetMachineType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetMachineTypeInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set machine type method over HTTP. + + Args: + request (~.compute.SetMachineTypeInstanceRequest): + The request object. A request message for + Instances.SetMachineType. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineType', + 'body': 'instances_set_machine_type_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_machine_type(request, metadata) + pb_request = compute.SetMachineTypeInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_machine_type(resp) + return resp + + class _SetMetadata(InstancesRestStub): + def __hash__(self): + return hash("SetMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetMetadataInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set metadata method over HTTP. + + Args: + request (~.compute.SetMetadataInstanceRequest): + The request object. A request message for + Instances.SetMetadata. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMetadata', + 'body': 'metadata_resource', + }, + ] + request, metadata = self._interceptor.pre_set_metadata(request, metadata) + pb_request = compute.SetMetadataInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_metadata(resp) + return resp + + class _SetMinCpuPlatform(InstancesRestStub): + def __hash__(self): + return hash("SetMinCpuPlatform") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetMinCpuPlatformInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set min cpu platform method over HTTP. + + Args: + request (~.compute.SetMinCpuPlatformInstanceRequest): + The request object. A request message for + Instances.SetMinCpuPlatform. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform', + 'body': 'instances_set_min_cpu_platform_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_min_cpu_platform(request, metadata) + pb_request = compute.SetMinCpuPlatformInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_min_cpu_platform(resp) + return resp + + class _SetName(InstancesRestStub): + def __hash__(self): + return hash("SetName") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetNameInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set name method over HTTP. + + Args: + request (~.compute.SetNameInstanceRequest): + The request object. A request message for + Instances.SetName. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setName', + 'body': 'instances_set_name_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_name(request, metadata) + pb_request = compute.SetNameInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_name(resp) + return resp + + class _SetScheduling(InstancesRestStub): + def __hash__(self): + return hash("SetScheduling") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetSchedulingInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set scheduling method over HTTP. + + Args: + request (~.compute.SetSchedulingInstanceRequest): + The request object. A request message for + Instances.SetScheduling. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setScheduling', + 'body': 'scheduling_resource', + }, + ] + request, metadata = self._interceptor.pre_set_scheduling(request, metadata) + pb_request = compute.SetSchedulingInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_scheduling(resp) + return resp + + class _SetServiceAccount(InstancesRestStub): + def __hash__(self): + return hash("SetServiceAccount") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetServiceAccountInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set service account method over HTTP. + + Args: + request (~.compute.SetServiceAccountInstanceRequest): + The request object. A request message for + Instances.SetServiceAccount. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount', + 'body': 'instances_set_service_account_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_service_account(request, metadata) + pb_request = compute.SetServiceAccountInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_service_account(resp) + return resp + + class _SetShieldedInstanceIntegrityPolicy(InstancesRestStub): + def __hash__(self): + return hash("SetShieldedInstanceIntegrityPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set shielded instance + integrity policy method over HTTP. + + Args: + request (~.compute.SetShieldedInstanceIntegrityPolicyInstanceRequest): + The request object. A request message for + Instances.SetShieldedInstanceIntegrityPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy', + 'body': 'shielded_instance_integrity_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_set_shielded_instance_integrity_policy(request, metadata) + pb_request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_shielded_instance_integrity_policy(resp) + return resp + + class _SetTags(InstancesRestStub): + def __hash__(self): + return hash("SetTags") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetTagsInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set tags method over HTTP. + + Args: + request (~.compute.SetTagsInstanceRequest): + The request object. A request message for + Instances.SetTags. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setTags', + 'body': 'tags_resource', + }, + ] + request, metadata = self._interceptor.pre_set_tags(request, metadata) + pb_request = compute.SetTagsInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_tags(resp) + return resp + + class _SimulateMaintenanceEvent(InstancesRestStub): + def __hash__(self): + return hash("SimulateMaintenanceEvent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SimulateMaintenanceEventInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the simulate maintenance + event method over HTTP. + + Args: + request (~.compute.SimulateMaintenanceEventInstanceRequest): + The request object. A request message for + Instances.SimulateMaintenanceEvent. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent', + }, + ] + request, metadata = self._interceptor.pre_simulate_maintenance_event(request, metadata) + pb_request = compute.SimulateMaintenanceEventInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_simulate_maintenance_event(resp) + return resp + + class _Start(InstancesRestStub): + def __hash__(self): + return hash("Start") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StartInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the start method over HTTP. + + Args: + request (~.compute.StartInstanceRequest): + The request object. A request message for + Instances.Start. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/start', + }, + ] + request, metadata = self._interceptor.pre_start(request, metadata) + pb_request = compute.StartInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start(resp) + return resp + + class _StartWithEncryptionKey(InstancesRestStub): + def __hash__(self): + return hash("StartWithEncryptionKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StartWithEncryptionKeyInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the start with encryption key method over HTTP. + + Args: + request (~.compute.StartWithEncryptionKeyInstanceRequest): + The request object. A request message for + Instances.StartWithEncryptionKey. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey', + 'body': 'instances_start_with_encryption_key_request_resource', + }, + ] + request, metadata = self._interceptor.pre_start_with_encryption_key(request, metadata) + pb_request = compute.StartWithEncryptionKeyInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_with_encryption_key(resp) + return resp + + class _Stop(InstancesRestStub): + def __hash__(self): + return hash("Stop") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StopInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the stop method over HTTP. + + Args: + request (~.compute.StopInstanceRequest): + The request object. A request message for Instances.Stop. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/stop', + }, + ] + request, metadata = self._interceptor.pre_stop(request, metadata) + pb_request = compute.StopInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop(resp) + return resp + + class _Suspend(InstancesRestStub): + def __hash__(self): + return hash("Suspend") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SuspendInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the suspend method over HTTP. + + Args: + request (~.compute.SuspendInstanceRequest): + The request object. A request message for + Instances.Suspend. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/suspend', + }, + ] + request, metadata = self._interceptor.pre_suspend(request, metadata) + pb_request = compute.SuspendInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_suspend(resp) + return resp + + class _TestIamPermissions(InstancesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsInstanceRequest): + The request object. A request message for + Instances.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _Update(InstancesRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateInstanceRequest): + The request object. A request message for + Instances.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}', + 'body': 'instance_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + class _UpdateAccessConfig(InstancesRestStub): + def __hash__(self): + return hash("UpdateAccessConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "networkInterface" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateAccessConfigInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update access config method over HTTP. + + Args: + request (~.compute.UpdateAccessConfigInstanceRequest): + The request object. A request message for + Instances.UpdateAccessConfig. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig', + 'body': 'access_config_resource', + }, + ] + request, metadata = self._interceptor.pre_update_access_config(request, metadata) + pb_request = compute.UpdateAccessConfigInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_access_config(resp) + return resp + + class _UpdateDisplayDevice(InstancesRestStub): + def __hash__(self): + return hash("UpdateDisplayDevice") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateDisplayDeviceInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update display device method over HTTP. + + Args: + request (~.compute.UpdateDisplayDeviceInstanceRequest): + The request object. A request message for + Instances.UpdateDisplayDevice. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateDisplayDevice', + 'body': 'display_device_resource', + }, + ] + request, metadata = self._interceptor.pre_update_display_device(request, metadata) + pb_request = compute.UpdateDisplayDeviceInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_display_device(resp) + return resp + + class _UpdateNetworkInterface(InstancesRestStub): + def __hash__(self): + return hash("UpdateNetworkInterface") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "networkInterface" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateNetworkInterfaceInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update network interface method over HTTP. + + Args: + request (~.compute.UpdateNetworkInterfaceInstanceRequest): + The request object. A request message for + Instances.UpdateNetworkInterface. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateNetworkInterface', + 'body': 'network_interface_resource', + }, + ] + request, metadata = self._interceptor.pre_update_network_interface(request, metadata) + pb_request = compute.UpdateNetworkInterfaceInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_network_interface(resp) + return resp + + class _UpdateShieldedInstanceConfig(InstancesRestStub): + def __hash__(self): + return hash("UpdateShieldedInstanceConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateShieldedInstanceConfigInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update shielded instance + config method over HTTP. + + Args: + request (~.compute.UpdateShieldedInstanceConfigInstanceRequest): + The request object. A request message for + Instances.UpdateShieldedInstanceConfig. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateShieldedInstanceConfig', + 'body': 'shielded_instance_config_resource', + }, + ] + request, metadata = self._interceptor.pre_update_shielded_instance_config(request, metadata) + pb_request = compute.UpdateShieldedInstanceConfigInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_shielded_instance_config(resp) + return resp + + @property + def add_access_config(self) -> Callable[ + [compute.AddAccessConfigInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddAccessConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def add_resource_policies(self) -> Callable[ + [compute.AddResourcePoliciesInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddResourcePolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInstancesRequest], + compute.InstanceAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def attach_disk(self) -> Callable[ + [compute.AttachDiskInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AttachDisk(self._session, self._host, self._interceptor) # type: ignore + + @property + def bulk_insert(self) -> Callable[ + [compute.BulkInsertInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BulkInsert(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_access_config(self) -> Callable[ + [compute.DeleteAccessConfigInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAccessConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def detach_disk(self) -> Callable[ + [compute.DetachDiskInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DetachDisk(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetInstanceRequest], + compute.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_effective_firewalls(self) -> Callable[ + [compute.GetEffectiveFirewallsInstanceRequest], + compute.InstancesGetEffectiveFirewallsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetEffectiveFirewalls(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_guest_attributes(self) -> Callable[ + [compute.GetGuestAttributesInstanceRequest], + compute.GuestAttributes]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGuestAttributes(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyInstanceRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_screenshot(self) -> Callable[ + [compute.GetScreenshotInstanceRequest], + compute.Screenshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetScreenshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_serial_port_output(self) -> Callable[ + [compute.GetSerialPortOutputInstanceRequest], + compute.SerialPortOutput]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSerialPortOutput(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_shielded_instance_identity(self) -> Callable[ + [compute.GetShieldedInstanceIdentityInstanceRequest], + compute.ShieldedInstanceIdentity]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetShieldedInstanceIdentity(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListInstancesRequest], + compute.InstanceList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_referrers(self) -> Callable[ + [compute.ListReferrersInstancesRequest], + compute.InstanceListReferrers]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReferrers(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_resource_policies(self) -> Callable[ + [compute.RemoveResourcePoliciesInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveResourcePolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def reset(self) -> Callable[ + [compute.ResetInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Reset(self._session, self._host, self._interceptor) # type: ignore + + @property + def resume(self) -> Callable[ + [compute.ResumeInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Resume(self._session, self._host, self._interceptor) # type: ignore + + @property + def send_diagnostic_interrupt(self) -> Callable[ + [compute.SendDiagnosticInterruptInstanceRequest], + compute.SendDiagnosticInterruptInstanceResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SendDiagnosticInterrupt(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_deletion_protection(self) -> Callable[ + [compute.SetDeletionProtectionInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetDeletionProtection(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_disk_auto_delete(self) -> Callable[ + [compute.SetDiskAutoDeleteInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetDiskAutoDelete(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyInstanceRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_machine_resources(self) -> Callable[ + [compute.SetMachineResourcesInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetMachineResources(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_machine_type(self) -> Callable[ + [compute.SetMachineTypeInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetMachineType(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_metadata(self) -> Callable[ + [compute.SetMetadataInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetMetadata(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_min_cpu_platform(self) -> Callable[ + [compute.SetMinCpuPlatformInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetMinCpuPlatform(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_name(self) -> Callable[ + [compute.SetNameInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetName(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_scheduling(self) -> Callable[ + [compute.SetSchedulingInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetScheduling(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_service_account(self) -> Callable[ + [compute.SetServiceAccountInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetServiceAccount(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_shielded_instance_integrity_policy(self) -> Callable[ + [compute.SetShieldedInstanceIntegrityPolicyInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetShieldedInstanceIntegrityPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_tags(self) -> Callable[ + [compute.SetTagsInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetTags(self._session, self._host, self._interceptor) # type: ignore + + @property + def simulate_maintenance_event(self) -> Callable[ + [compute.SimulateMaintenanceEventInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SimulateMaintenanceEvent(self._session, self._host, self._interceptor) # type: ignore + + @property + def start(self) -> Callable[ + [compute.StartInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Start(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_with_encryption_key(self) -> Callable[ + [compute.StartWithEncryptionKeyInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartWithEncryptionKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop(self) -> Callable[ + [compute.StopInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Stop(self._session, self._host, self._interceptor) # type: ignore + + @property + def suspend(self) -> Callable[ + [compute.SuspendInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Suspend(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsInstanceRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_access_config(self) -> Callable[ + [compute.UpdateAccessConfigInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAccessConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_display_device(self) -> Callable[ + [compute.UpdateDisplayDeviceInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDisplayDevice(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_network_interface(self) -> Callable[ + [compute.UpdateNetworkInterfaceInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateNetworkInterface(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_shielded_instance_config(self) -> Callable[ + [compute.UpdateShieldedInstanceConfigInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateShieldedInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InstancesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/__init__.py new file mode 100644 index 000000000..7418f4c0d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import InterconnectAttachmentsClient + +__all__ = ( + 'InterconnectAttachmentsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/client.py new file mode 100644 index 000000000..f98afefc8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/client.py @@ -0,0 +1,1873 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.interconnect_attachments import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import InterconnectAttachmentsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import InterconnectAttachmentsRestTransport + + +class InterconnectAttachmentsClientMeta(type): + """Metaclass for the InterconnectAttachments client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InterconnectAttachmentsTransport]] + _transport_registry["rest"] = InterconnectAttachmentsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[InterconnectAttachmentsTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InterconnectAttachmentsClient(metaclass=InterconnectAttachmentsClientMeta): + """The InterconnectAttachments API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InterconnectAttachmentsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InterconnectAttachmentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InterconnectAttachmentsTransport: + """Returns the transport used by the client instance. + + Returns: + InterconnectAttachmentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InterconnectAttachmentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the interconnect attachments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, InterconnectAttachmentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, InterconnectAttachmentsTransport): + # transport is a InterconnectAttachmentsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListInterconnectAttachmentsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of interconnect + attachments. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInterconnectAttachmentsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListInterconnectAttachmentsRequest, dict]): + The request object. A request message for + InterconnectAttachments.AggregatedList. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.interconnect_attachments.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListInterconnectAttachmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListInterconnectAttachmentsRequest): + request = compute.AggregatedListInterconnectAttachmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + interconnect_attachment: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified interconnect attachment. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInterconnectAttachmentRequest( + interconnect_attachment="interconnect_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_attachment (str): + Name of the interconnect attachment + to delete. + + This corresponds to the ``interconnect_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, interconnect_attachment]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInterconnectAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInterconnectAttachmentRequest): + request = compute.DeleteInterconnectAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if interconnect_attachment is not None: + request.interconnect_attachment = interconnect_attachment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("interconnect_attachment", request.interconnect_attachment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete(self, + request: Optional[Union[compute.DeleteInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + interconnect_attachment: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified interconnect attachment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInterconnectAttachmentRequest( + interconnect_attachment="interconnect_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ interconnect_attachment (str): + Name of the interconnect attachment + to delete. + + This corresponds to the ``interconnect_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, interconnect_attachment]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInterconnectAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInterconnectAttachmentRequest): + request = compute.DeleteInterconnectAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if interconnect_attachment is not None: + request.interconnect_attachment = interconnect_attachment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("interconnect_attachment", request.interconnect_attachment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + interconnect_attachment: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InterconnectAttachment: + r"""Returns the specified interconnect attachment. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetInterconnectAttachmentRequest( + interconnect_attachment="interconnect_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_attachment (str): + Name of the interconnect attachment + to return. + + This corresponds to the ``interconnect_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.InterconnectAttachment: + Represents an Interconnect Attachment + (VLAN) resource. You can use + Interconnect attachments (VLANS) to + connect your Virtual Private Cloud + networks to your on-premises networks + through an Interconnect. For more + information, read Creating VLAN + Attachments. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, interconnect_attachment]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetInterconnectAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetInterconnectAttachmentRequest): + request = compute.GetInterconnectAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if interconnect_attachment is not None: + request.interconnect_attachment = interconnect_attachment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("interconnect_attachment", request.interconnect_attachment), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + interconnect_attachment_resource: Optional[compute.InterconnectAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an InterconnectAttachment in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInterconnectAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. 
+ This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): + The body resource for this request + This corresponds to the ``interconnect_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, interconnect_attachment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInterconnectAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInterconnectAttachmentRequest): + request = compute.InsertInterconnectAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if interconnect_attachment_resource is not None: + request.interconnect_attachment_resource = interconnect_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + interconnect_attachment_resource: Optional[compute.InterconnectAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an InterconnectAttachment in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInterconnectAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): + The body resource for this request + This corresponds to the ``interconnect_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, interconnect_attachment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInterconnectAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInterconnectAttachmentRequest): + request = compute.InsertInterconnectAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if interconnect_attachment_resource is not None: + request.interconnect_attachment_resource = interconnect_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListInterconnectAttachmentsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of interconnect attachments + contained within the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.ListInterconnectAttachmentsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInterconnectAttachmentsRequest, dict]): + The request object. A request message for + InterconnectAttachments.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.interconnect_attachments.pagers.ListPager: + Response to the list request, and + contains a list of interconnect + attachments. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInterconnectAttachmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInterconnectAttachmentsRequest): + request = compute.ListInterconnectAttachmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + interconnect_attachment: Optional[str] = None, + interconnect_attachment_resource: Optional[compute.InterconnectAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified interconnect attachment with + the data included in the request. This method supports + PATCH semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchInterconnectAttachmentRequest( + interconnect_attachment="interconnect_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str):
+ Name of the region scoping this
+ request.
+
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ interconnect_attachment (str):
+ Name of the interconnect attachment
+ to patch.
+
+ This corresponds to the ``interconnect_attachment`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment):
+ The body resource for this request
+ This corresponds to the ``interconnect_attachment_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, interconnect_attachment, interconnect_attachment_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.PatchInterconnectAttachmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if interconnect_attachment is not None: + request.interconnect_attachment = interconnect_attachment + if interconnect_attachment_resource is not None: + request.interconnect_attachment_resource = interconnect_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("interconnect_attachment", request.interconnect_attachment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + interconnect_attachment: Optional[str] = None, + interconnect_attachment_resource: Optional[compute.InterconnectAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified interconnect attachment with + the data included in the request. This method supports + PATCH semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchInterconnectAttachmentRequest( + interconnect_attachment="interconnect_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_attachment (str): + Name of the interconnect attachment + to patch. + + This corresponds to the ``interconnect_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): + The body resource for this request + This corresponds to the ``interconnect_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, interconnect_attachment, interconnect_attachment_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.PatchInterconnectAttachmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.PatchInterconnectAttachmentRequest):
+ request = compute.PatchInterconnectAttachmentRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if region is not None:
+ request.region = region
+ if interconnect_attachment is not None:
+ request.interconnect_attachment = interconnect_attachment
+ if interconnect_attachment_resource is not None:
+ request.interconnect_attachment_resource = interconnect_attachment_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.patch]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("region", request.region),
+ ("interconnect_attachment", request.interconnect_attachment),
+ )),
+ )
+
+ # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on an InterconnectAttachment. To + learn more about labels, read the Labeling Resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInterconnectAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.SetLabels. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, resource, region_set_labels_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetLabelsInterconnectAttachmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetLabelsInterconnectAttachmentRequest):
+ request = compute.SetLabelsInterconnectAttachmentRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if region is not None:
+ request.region = region
+ if resource is not None:
+ request.resource = resource
+ if region_set_labels_request_resource is not None:
+ request.region_set_labels_request_resource = region_set_labels_request_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_labels]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("region", request.region),
+ ("resource", request.resource),
+ )),
+ )
+
+ # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsInterconnectAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on an InterconnectAttachment. To + learn more about labels, read the Labeling Resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInterconnectAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsInterconnectAttachmentRequest, dict]): + The request object. A request message for + InterconnectAttachments.SetLabels. See + the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region (str):
+ The region for this request.
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ resource (str):
+ Name or id of the resource for this
+ request.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest):
+ The body resource for this request
+ This corresponds to the ``region_set_labels_request_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, resource, region_set_labels_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetLabelsInterconnectAttachmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetLabelsInterconnectAttachmentRequest): + request = compute.SetLabelsInterconnectAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "InterconnectAttachmentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "InterconnectAttachmentsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/pagers.py new file mode 100644 index 000000000..aa9e939b5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InterconnectAttachmentAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InterconnectAttachmentAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InterconnectAttachmentAggregatedList], + request: compute.AggregatedListInterconnectAttachmentsRequest, + response: compute.InterconnectAttachmentAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListInterconnectAttachmentsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InterconnectAttachmentAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListInterconnectAttachmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InterconnectAttachmentAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.InterconnectAttachmentsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.InterconnectAttachmentsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InterconnectAttachmentList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InterconnectAttachmentList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InterconnectAttachmentList], + request: compute.ListInterconnectAttachmentsRequest, + response: compute.InterconnectAttachmentList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListInterconnectAttachmentsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InterconnectAttachmentList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListInterconnectAttachmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InterconnectAttachmentList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InterconnectAttachment]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/__init__.py new file mode 100644 index 000000000..f52799a2e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InterconnectAttachmentsTransport +from .rest import InterconnectAttachmentsRestTransport +from .rest import InterconnectAttachmentsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InterconnectAttachmentsTransport]] +_transport_registry['rest'] = InterconnectAttachmentsRestTransport + +__all__ = ( + 'InterconnectAttachmentsTransport', + 'InterconnectAttachmentsRestTransport', + 'InterconnectAttachmentsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/base.py new file mode 100644 index 000000000..4d059b4c7 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class InterconnectAttachmentsTransport(abc.ABC): + """Abstract transport class for InterconnectAttachments.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInterconnectAttachmentsRequest], + Union[ + compute.InterconnectAttachmentAggregatedList, + Awaitable[compute.InterconnectAttachmentAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteInterconnectAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetInterconnectAttachmentRequest], + Union[ + compute.InterconnectAttachment, + Awaitable[compute.InterconnectAttachment] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertInterconnectAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListInterconnectAttachmentsRequest], + Union[ + compute.InterconnectAttachmentList, + Awaitable[compute.InterconnectAttachmentList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchInterconnectAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsInterconnectAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + 
+__all__ = ( + 'InterconnectAttachmentsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py new file mode 100644 index 000000000..3081f05b3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py @@ -0,0 +1,1053 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import InterconnectAttachmentsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class InterconnectAttachmentsRestInterceptor: + """Interceptor for InterconnectAttachments. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InterconnectAttachmentsRestTransport. + + .. 
code-block:: python + class MyCustomInterconnectAttachmentsInterceptor(InterconnectAttachmentsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InterconnectAttachmentsRestTransport(interceptor=MyCustomInterconnectAttachmentsInterceptor()) + client = InterconnectAttachmentsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListInterconnectAttachmentsRequest, metadata: Sequence[Tuple[str, str]]) -> 
Tuple[compute.AggregatedListInterconnectAttachmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectAttachments server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.InterconnectAttachmentAggregatedList) -> compute.InterconnectAttachmentAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the InterconnectAttachments server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteInterconnectAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteInterconnectAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectAttachments server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the InterconnectAttachments server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetInterconnectAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetInterconnectAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectAttachments server. 
+ """ + return request, metadata + + def post_get(self, response: compute.InterconnectAttachment) -> compute.InterconnectAttachment: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the InterconnectAttachments server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertInterconnectAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertInterconnectAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectAttachments server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the InterconnectAttachments server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListInterconnectAttachmentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInterconnectAttachmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectAttachments server. + """ + return request, metadata + + def post_list(self, response: compute.InterconnectAttachmentList) -> compute.InterconnectAttachmentList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the InterconnectAttachments server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchInterconnectAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchInterconnectAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectAttachments server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the InterconnectAttachments server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsInterconnectAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsInterconnectAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectAttachments server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the InterconnectAttachments server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InterconnectAttachmentsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InterconnectAttachmentsRestInterceptor + + +class InterconnectAttachmentsRestTransport(InterconnectAttachmentsTransport): + """REST backend transport for InterconnectAttachments. + + The InterconnectAttachments API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InterconnectAttachmentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InterconnectAttachmentsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(InterconnectAttachmentsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListInterconnectAttachmentsRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InterconnectAttachmentAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListInterconnectAttachmentsRequest): + The request object. A request message for + InterconnectAttachments.AggregatedList. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectAttachmentAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/interconnectAttachments', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListInterconnectAttachmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectAttachmentAggregatedList() + pb_resp = compute.InterconnectAttachmentAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(InterconnectAttachmentsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteInterconnectAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteInterconnectAttachmentRequest): + The request object. A request message for + InterconnectAttachments.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteInterconnectAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(InterconnectAttachmentsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetInterconnectAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InterconnectAttachment: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInterconnectAttachmentRequest): + The request object. A request message for + InterconnectAttachments.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectAttachment: + Represents an Interconnect Attachment + (VLAN) resource. You can use + Interconnect attachments (VLANS) to + connect your Virtual Private Cloud + networks to your on-premises networks + through an Interconnect. For more + information, read Creating VLAN + Attachments. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInterconnectAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectAttachment() + pb_resp = compute.InterconnectAttachment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(InterconnectAttachmentsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertInterconnectAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertInterconnectAttachmentRequest): + The request object. A request message for + InterconnectAttachments.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/interconnectAttachments', + 'body': 'interconnect_attachment_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertInterconnectAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(InterconnectAttachmentsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInterconnectAttachmentsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InterconnectAttachmentList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInterconnectAttachmentsRequest): + The request object. A request message for + InterconnectAttachments.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectAttachmentList: + Response to the list request, and + contains a list of interconnect + attachments. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/interconnectAttachments', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInterconnectAttachmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectAttachmentList() + pb_resp = compute.InterconnectAttachmentList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(InterconnectAttachmentsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchInterconnectAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchInterconnectAttachmentRequest): + The request object. A request message for + InterconnectAttachments.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}', + 'body': 'interconnect_attachment_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchInterconnectAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetLabels(InterconnectAttachmentsRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsInterconnectAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsInterconnectAttachmentRequest): + The request object. A request message for + InterconnectAttachments.SetLabels. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{resource}/setLabels', + 'body': 'region_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsInterconnectAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListInterconnectAttachmentsRequest], + compute.InterconnectAttachmentAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteInterconnectAttachmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetInterconnectAttachmentRequest], + compute.InterconnectAttachment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertInterconnectAttachmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListInterconnectAttachmentsRequest], + compute.InterconnectAttachmentList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchInterconnectAttachmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsInterconnectAttachmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InterconnectAttachmentsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/__init__.py new file mode 100644 index 000000000..759bc520f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import InterconnectLocationsClient + +__all__ = ( + 'InterconnectLocationsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/client.py new file mode 100644 index 000000000..5683d2d00 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/client.py @@ -0,0 +1,623 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.interconnect_locations import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import InterconnectLocationsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import InterconnectLocationsRestTransport + + +class InterconnectLocationsClientMeta(type): + """Metaclass for the InterconnectLocations client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InterconnectLocationsTransport]] + _transport_registry["rest"] = InterconnectLocationsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[InterconnectLocationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InterconnectLocationsClient(metaclass=InterconnectLocationsClientMeta): + """The InterconnectLocations API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InterconnectLocationsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InterconnectLocationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InterconnectLocationsTransport: + """Returns the transport used by the client instance. + + Returns: + InterconnectLocationsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InterconnectLocationsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the interconnect locations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, InterconnectLocationsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, InterconnectLocationsTransport): + # transport is a InterconnectLocationsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get(self, + request: Optional[Union[compute.GetInterconnectLocationRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect_location: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InterconnectLocation: + r"""Returns the details for the specified interconnect + location. Gets a list of available interconnect + locations by making a list() request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.InterconnectLocationsClient() + + # Initialize request argument(s) + request = compute_v1.GetInterconnectLocationRequest( + interconnect_location="interconnect_location_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetInterconnectLocationRequest, dict]): + The request object. A request message for + InterconnectLocations.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_location (str): + Name of the interconnect location to + return. + + This corresponds to the ``interconnect_location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InterconnectLocation: + Represents an Interconnect Attachment + (VLAN) Location resource. You can use + this resource to find location details + about an Interconnect attachment (VLAN). + For more information about interconnect + attachments, read Creating VLAN + Attachments. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, interconnect_location]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetInterconnectLocationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetInterconnectLocationRequest): + request = compute.GetInterconnectLocationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect_location is not None: + request.interconnect_location = interconnect_location + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("interconnect_location", request.interconnect_location), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListInterconnectLocationsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of interconnect locations + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InterconnectLocationsClient() + + # Initialize request argument(s) + request = compute_v1.ListInterconnectLocationsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInterconnectLocationsRequest, dict]): + The request object. A request message for + InterconnectLocations.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.interconnect_locations.pagers.ListPager: + Response to the list request, and + contains a list of interconnect + locations. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInterconnectLocationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInterconnectLocationsRequest): + request = compute.ListInterconnectLocationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "InterconnectLocationsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "InterconnectLocationsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/pagers.py new file mode 100644 index 000000000..eed51e419 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InterconnectLocationList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InterconnectLocationList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.InterconnectLocationList], + request: compute.ListInterconnectLocationsRequest, + response: compute.InterconnectLocationList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListInterconnectLocationsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InterconnectLocationList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListInterconnectLocationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InterconnectLocationList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InterconnectLocation]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/__init__.py new file mode 100644 index 000000000..4f2aa3f89 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InterconnectLocationsTransport +from .rest import InterconnectLocationsRestTransport +from .rest import InterconnectLocationsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InterconnectLocationsTransport]] +_transport_registry['rest'] = InterconnectLocationsRestTransport + +__all__ = ( + 'InterconnectLocationsTransport', + 'InterconnectLocationsRestTransport', + 'InterconnectLocationsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/base.py new file mode 100644 index 000000000..80be88d8e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/base.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class InterconnectLocationsTransport(abc.ABC): + """Abstract transport class for InterconnectLocations.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. 
Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetInterconnectLocationRequest], + Union[ + compute.InterconnectLocation, + Awaitable[compute.InterconnectLocation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListInterconnectLocationsRequest], + Union[ + compute.InterconnectLocationList, + Awaitable[compute.InterconnectLocationList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'InterconnectLocationsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py new file mode 100644 index 000000000..60862b616 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import InterconnectLocationsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class InterconnectLocationsRestInterceptor: + """Interceptor for InterconnectLocations. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InterconnectLocationsRestTransport. + + .. code-block:: python + class MyCustomInterconnectLocationsInterceptor(InterconnectLocationsRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InterconnectLocationsRestTransport(interceptor=MyCustomInterconnectLocationsInterceptor()) + client = InterconnectLocationsClient(transport=transport) + + + """ + def pre_get(self, request: compute.GetInterconnectLocationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetInterconnectLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectLocations server. + """ + return request, metadata + + def post_get(self, response: compute.InterconnectLocation) -> compute.InterconnectLocation: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the InterconnectLocations server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListInterconnectLocationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInterconnectLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectLocations server. + """ + return request, metadata + + def post_list(self, response: compute.InterconnectLocationList) -> compute.InterconnectLocationList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the InterconnectLocations server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InterconnectLocationsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InterconnectLocationsRestInterceptor + + +class InterconnectLocationsRestTransport(InterconnectLocationsTransport): + """REST backend transport for InterconnectLocations. + + The InterconnectLocations API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InterconnectLocationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InterconnectLocationsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Get(InterconnectLocationsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetInterconnectLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InterconnectLocation: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInterconnectLocationRequest): + The request object. A request message for + InterconnectLocations.Get. 
See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectLocation: + Represents an Interconnect Attachment + (VLAN) Location resource. You can use + this resource to find location details + about an Interconnect attachment (VLAN). + For more information about interconnect + attachments, read Creating VLAN + Attachments. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/interconnectLocations/{interconnect_location}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInterconnectLocationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectLocation() + pb_resp = compute.InterconnectLocation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(InterconnectLocationsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInterconnectLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InterconnectLocationList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInterconnectLocationsRequest): + The request object. A request message for + InterconnectLocations.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectLocationList: + Response to the list request, and + contains a list of interconnect + locations. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/interconnectLocations', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInterconnectLocationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectLocationList() + pb_resp = compute.InterconnectLocationList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def get(self) -> Callable[ + [compute.GetInterconnectLocationRequest], + compute.InterconnectLocation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListInterconnectLocationsRequest], + compute.InterconnectLocationList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InterconnectLocationsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/__init__.py new file mode 100644 index 000000000..9900c3d68 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import InterconnectRemoteLocationsClient + +__all__ = ( + 'InterconnectRemoteLocationsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/client.py new file mode 100644 index 000000000..a14448b52 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/client.py @@ -0,0 +1,621 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from collections import OrderedDict
import os
import re
from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast

from google.cloud.compute_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.cloud.compute_v1.services.interconnect_remote_locations import pagers
from google.cloud.compute_v1.types import compute
from .transports.base import InterconnectRemoteLocationsTransport, DEFAULT_CLIENT_INFO
from .transports.rest import InterconnectRemoteLocationsRestTransport


class InterconnectRemoteLocationsClientMeta(type):
    """Metaclass for the InterconnectRemoteLocations client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[InterconnectRemoteLocationsTransport]]
    _transport_registry["rest"] = InterconnectRemoteLocationsRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
        ) -> Type[InterconnectRemoteLocationsTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class InterconnectRemoteLocationsClient(metaclass=InterconnectRemoteLocationsClientMeta):
    """The InterconnectRemoteLocations API."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.

        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # FIX: the named groups had been mangled to the invalid form
        # ``(?P[^.]+)``; ``re`` requires ``(?P<name>...)`` syntax and would
        # raise ``re.error`` here at class-definition time otherwise.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "compute.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            InterconnectRemoteLocationsClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            InterconnectRemoteLocationsClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    from_service_account_json = from_service_account_file

    @property
    def transport(self) -> InterconnectRemoteLocationsTransport:
        """Returns the transport used by the client instance.

        Returns:
            InterconnectRemoteLocationsTransport: The transport used by the client
                instance.
        """
        return self._transport

    @staticmethod
    def common_billing_account_path(billing_account: str, ) -> str:
        """Returns a fully-qualified billing_account string."""
        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )

    @staticmethod
    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
        """Parse a billing_account path into its component segments."""
        # FIX: restored the stripped named group ``(?P<billing_account>...)``.
        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_folder_path(folder: str, ) -> str:
        """Returns a fully-qualified folder string."""
        return "folders/{folder}".format(folder=folder, )

    @staticmethod
    def parse_common_folder_path(path: str) -> Dict[str, str]:
        """Parse a folder path into its component segments."""
        m = re.match(r"^folders/(?P<folder>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_organization_path(organization: str, ) -> str:
        """Returns a fully-qualified organization string."""
        return "organizations/{organization}".format(organization=organization, )

    @staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parse a organization path into its component segments."""
        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_project_path(project: str, ) -> str:
        """Returns a fully-qualified project string."""
        return "projects/{project}".format(project=project, )

    @staticmethod
    def parse_common_project_path(path: str) -> Dict[str, str]:
        """Parse a project path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_location_path(project: str, location: str, ) -> str:
        """Returns a fully-qualified location string."""
        return "projects/{project}/locations/{location}".format(project=project, location=location, )

    @staticmethod
    def parse_common_location_path(path: str) -> Dict[str, str]:
        """Parse a location path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}

    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` if provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, InterconnectRemoteLocationsTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the interconnect remote locations client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, InterconnectRemoteLocationsTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
                NOTE: "rest" transport functionality is currently in a
                beta state (preview). We welcome your feedback via an
                issue in this library's source repository.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, InterconnectRemoteLocationsTransport):
            # transport is a InterconnectRemoteLocationsTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def get(self,
            request: Optional[Union[compute.GetInterconnectRemoteLocationRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            interconnect_remote_location: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.InterconnectRemoteLocation:
        r"""Returns the details for the specified interconnect
        remote location. Gets a list of available interconnect
        remote locations by making a list() request.

        Args:
            request (Union[google.cloud.compute_v1.types.GetInterconnectRemoteLocationRequest, dict]):
                The request object. A request message for
                InterconnectRemoteLocations.Get. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            interconnect_remote_location (str):
                Name of the interconnect remote
                location to return.

                This corresponds to the ``interconnect_remote_location`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.InterconnectRemoteLocation:
                Represents a Cross-Cloud Interconnect
                Remote Location resource. You can use
                this resource to find remote location
                details about an Interconnect attachment
                (VLAN).

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, interconnect_remote_location])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetInterconnectRemoteLocationRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetInterconnectRemoteLocationRequest):
            request = compute.GetInterconnectRemoteLocationRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if interconnect_remote_location is not None:
                request.interconnect_remote_location = interconnect_remote_location

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("interconnect_remote_location", request.interconnect_remote_location),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def list(self,
            request: Optional[Union[compute.ListInterconnectRemoteLocationsRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListPager:
        r"""Retrieves the list of interconnect remote locations
        available to the specified project.

        Args:
            request (Union[google.cloud.compute_v1.types.ListInterconnectRemoteLocationsRequest, dict]):
                The request object. A request message for
                InterconnectRemoteLocations.List. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.services.interconnect_remote_locations.pagers.ListPager:
                Response to the list request, and
                contains a list of interconnect remote
                locations. Iterating over this object
                will yield results and resolve
                additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.ListInterconnectRemoteLocationsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.ListInterconnectRemoteLocationsRequest):
            request = compute.ListInterconnectRemoteLocationsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def __enter__(self) -> "InterconnectRemoteLocationsClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


__all__ = (
    "InterconnectRemoteLocationsClient",
)
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InterconnectRemoteLocationList], + request: compute.ListInterconnectRemoteLocationsRequest, + response: compute.InterconnectRemoteLocationList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListInterconnectRemoteLocationsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InterconnectRemoteLocationList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListInterconnectRemoteLocationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InterconnectRemoteLocationList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InterconnectRemoteLocation]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/transports/__init__.py new file mode 100644 index 000000000..564897b2e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/transports/__init__.py @@ 
-0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InterconnectRemoteLocationsTransport +from .rest import InterconnectRemoteLocationsRestTransport +from .rest import InterconnectRemoteLocationsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InterconnectRemoteLocationsTransport]] +_transport_registry['rest'] = InterconnectRemoteLocationsRestTransport + +__all__ = ( + 'InterconnectRemoteLocationsTransport', + 'InterconnectRemoteLocationsRestTransport', + 'InterconnectRemoteLocationsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/transports/base.py new file mode 100644 index 000000000..6648afa60 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnect_remote_locations/transports/base.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class InterconnectRemoteLocationsTransport(abc.ABC):
    """Abstract transport class for InterconnectRemoteLocations."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute.readonly',
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        scope_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Remember exactly what the caller asked for (possibly None).
        self._scopes = scopes

        # Resolve credentials: explicit object, credentials file, or
        # application default — in that order of precedence.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scope_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scope_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # Service-account credentials get self-signed JWTs when requested
        # and supported by the installed google-auth version.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        self._credentials = credentials

        # Normalize the host: default to port 443 (HTTPS) when unspecified.
        self._host = host if ':' in host else host + ':443'

    def _prep_wrapped_methods(self, client_info):
        # Precompute the wrapped (retry/timeout-aware) methods once.
        self._wrapped_methods = {
            rpc: gapic_v1.method.wrap_method(
                rpc,
                default_timeout=None,
                client_info=client_info,
            )
            for rpc in (self.get, self.list)
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetInterconnectRemoteLocationRequest],
            Union[
                compute.InterconnectRemoteLocation,
                Awaitable[compute.InterconnectRemoteLocation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListInterconnectRemoteLocationsRequest],
            Union[
                compute.InterconnectRemoteLocationList,
                Awaitable[compute.InterconnectRemoteLocationList]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()


__all__ = (
    'InterconnectRemoteLocationsTransport',
)
coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import InterconnectRemoteLocationsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class InterconnectRemoteLocationsRestInterceptor: + """Interceptor for InterconnectRemoteLocations. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InterconnectRemoteLocationsRestTransport. + + .. code-block:: python + class MyCustomInterconnectRemoteLocationsInterceptor(InterconnectRemoteLocationsRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InterconnectRemoteLocationsRestTransport(interceptor=MyCustomInterconnectRemoteLocationsInterceptor()) + client = InterconnectRemoteLocationsClient(transport=transport) + + + """ + def pre_get(self, request: compute.GetInterconnectRemoteLocationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetInterconnectRemoteLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectRemoteLocations server. + """ + return request, metadata + + def post_get(self, response: compute.InterconnectRemoteLocation) -> compute.InterconnectRemoteLocation: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the InterconnectRemoteLocations server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListInterconnectRemoteLocationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInterconnectRemoteLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InterconnectRemoteLocations server. + """ + return request, metadata + + def post_list(self, response: compute.InterconnectRemoteLocationList) -> compute.InterconnectRemoteLocationList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the InterconnectRemoteLocations server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InterconnectRemoteLocationsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InterconnectRemoteLocationsRestInterceptor + + +class InterconnectRemoteLocationsRestTransport(InterconnectRemoteLocationsTransport): + """REST backend transport for InterconnectRemoteLocations. + + The InterconnectRemoteLocations API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InterconnectRemoteLocationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or InterconnectRemoteLocationsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Get(InterconnectRemoteLocationsRestStub):
+        def __hash__(self):
+            return hash("Get")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.GetInterconnectRemoteLocationRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.InterconnectRemoteLocation:
+            r"""Call the get method over HTTP.
+ + Args: + request (~.compute.GetInterconnectRemoteLocationRequest): + The request object. A request message for + InterconnectRemoteLocations.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectRemoteLocation: + Represents a Cross-Cloud Interconnect + Remote Location resource. You can use + this resource to find remote location + details about an Interconnect attachment + (VLAN). + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/interconnectRemoteLocations/{interconnect_remote_location}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInterconnectRemoteLocationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectRemoteLocation() + pb_resp = compute.InterconnectRemoteLocation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(InterconnectRemoteLocationsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInterconnectRemoteLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InterconnectRemoteLocationList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInterconnectRemoteLocationsRequest): + The request object. A request message for + InterconnectRemoteLocations.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectRemoteLocationList: + Response to the list request, and + contains a list of interconnect remote + locations. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/interconnectRemoteLocations', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInterconnectRemoteLocationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectRemoteLocationList() + pb_resp = compute.InterconnectRemoteLocationList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def get(self) -> Callable[ + [compute.GetInterconnectRemoteLocationRequest], + compute.InterconnectRemoteLocation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListInterconnectRemoteLocationsRequest], + compute.InterconnectRemoteLocationList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InterconnectRemoteLocationsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/__init__.py new file mode 100644 index 000000000..67235c724 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import InterconnectsClient + +__all__ = ( + 'InterconnectsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/client.py new file mode 100644 index 000000000..f43ebdda9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/client.py @@ -0,0 +1,1753 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.interconnects import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import InterconnectsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import InterconnectsRestTransport + + +class InterconnectsClientMeta(type): + """Metaclass for the Interconnects client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InterconnectsTransport]] + _transport_registry["rest"] = InterconnectsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[InterconnectsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is
+                provided, then the first transport in the registry is used.
+
+            Returns:
+                The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class InterconnectsClient(metaclass=InterconnectsClientMeta):
+    """The Interconnects API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            InterconnectsClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InterconnectsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InterconnectsTransport: + """Returns the transport used by the client instance. + + Returns: + InterconnectsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization 
string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InterconnectsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the interconnects client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, InterconnectsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, InterconnectsTransport): + # transport is a InterconnectsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified Interconnect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInterconnectRequest, dict]): + The request object. A request message for + Interconnects.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect (str): + Name of the interconnect to delete. + This corresponds to the ``interconnect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, interconnect]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInterconnectRequest): + request = compute.DeleteInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect is not None: + request.interconnect = interconnect + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("interconnect", request.interconnect), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified Interconnect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInterconnectRequest, dict]): + The request object. A request message for + Interconnects.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect (str): + Name of the interconnect to delete. + This corresponds to the ``interconnect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, interconnect]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInterconnectRequest): + request = compute.DeleteInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect is not None: + request.interconnect = interconnect + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("interconnect", request.interconnect), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Interconnect: + r"""Returns the specified Interconnect. Get a list of + available Interconnects by making a list() request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.GetInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetInterconnectRequest, dict]): + The request object. A request message for + Interconnects.Get. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect (str): + Name of the interconnect to return. + This corresponds to the ``interconnect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Interconnect: + Represents an Interconnect resource. + An Interconnect resource is a dedicated + connection between the Google Cloud + network and your on-premises network. + For more information, read the Dedicated + Interconnect Overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, interconnect]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetInterconnectRequest): + request = compute.GetInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect is not None: + request.interconnect = interconnect + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("interconnect", request.interconnect), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_diagnostics(self, + request: Optional[Union[compute.GetDiagnosticsInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InterconnectsGetDiagnosticsResponse: + r"""Returns the interconnectDiagnostics for the specified + Interconnect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_diagnostics(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.GetDiagnosticsInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.get_diagnostics(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetDiagnosticsInterconnectRequest, dict]): + The request object. A request message for + Interconnects.GetDiagnostics. 
See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect (str): + Name of the interconnect resource to + query. + + This corresponds to the ``interconnect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InterconnectsGetDiagnosticsResponse: + Response for the + InterconnectsGetDiagnosticsRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, interconnect]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetDiagnosticsInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetDiagnosticsInterconnectRequest): + request = compute.GetDiagnosticsInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect is not None: + request.interconnect = interconnect + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_diagnostics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("interconnect", request.interconnect), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect_resource: Optional[compute.Interconnect] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an Interconnect in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInterconnectRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInterconnectRequest, dict]): + The request object. A request message for + Interconnects.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_resource (google.cloud.compute_v1.types.Interconnect): + The body resource for this request + This corresponds to the ``interconnect_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, interconnect_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInterconnectRequest): + request = compute.InsertInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect_resource is not None: + request.interconnect_resource = interconnect_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect_resource: Optional[compute.Interconnect] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an Interconnect in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInterconnectRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInterconnectRequest, dict]): + The request object. A request message for + Interconnects.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_resource (google.cloud.compute_v1.types.Interconnect): + The body resource for this request + This corresponds to the ``interconnect_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, interconnect_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertInterconnectRequest): + request = compute.InsertInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect_resource is not None: + request.interconnect_resource = interconnect_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListInterconnectsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of Interconnects available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.ListInterconnectsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInterconnectsRequest, dict]): + The request object. A request message for + Interconnects.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.interconnects.pagers.ListPager: + Response to the list request, and + contains a list of interconnects. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInterconnectsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInterconnectsRequest): + request = compute.ListInterconnectsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect: Optional[str] = None, + interconnect_resource: Optional[compute.Interconnect] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified Interconnect with the data + included in the request. 
This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.PatchInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchInterconnectRequest, dict]): + The request object. A request message for + Interconnects.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect (str): + Name of the interconnect to update. + This corresponds to the ``interconnect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_resource (google.cloud.compute_v1.types.Interconnect): + The body resource for this request + This corresponds to the ``interconnect_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, interconnect, interconnect_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchInterconnectRequest): + request = compute.PatchInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect is not None: + request.interconnect = interconnect + if interconnect_resource is not None: + request.interconnect_resource = interconnect_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("interconnect", request.interconnect), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch(self, + request: Optional[Union[compute.PatchInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + interconnect: Optional[str] = None, + interconnect_resource: Optional[compute.Interconnect] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified Interconnect with the data + included in the request. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.PatchInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchInterconnectRequest, dict]): + The request object. A request message for + Interconnects.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect (str): + Name of the interconnect to update. 
+ This corresponds to the ``interconnect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + interconnect_resource (google.cloud.compute_v1.types.Interconnect): + The body resource for this request + This corresponds to the ``interconnect_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, interconnect, interconnect_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchInterconnectRequest): + request = compute.PatchInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if interconnect is not None: + request.interconnect = interconnect + if interconnect_resource is not None: + request.interconnect_resource = interconnect_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("interconnect", request.interconnect), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on an Interconnect. 
To learn more + about labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInterconnectRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsInterconnectRequest, dict]): + The request object. A request message for + Interconnects.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsInterconnectRequest): + request = compute.SetLabelsInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsInterconnectRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on an Interconnect. To learn more + about labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInterconnectRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsInterconnectRequest, dict]): + The request object. A request message for + Interconnects.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsInterconnectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsInterconnectRequest): + request = compute.SetLabelsInterconnectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "InterconnectsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "InterconnectsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/pagers.py new file mode 100644 index 000000000..a686bae15 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InterconnectList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InterconnectList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.InterconnectList], + request: compute.ListInterconnectsRequest, + response: compute.InterconnectList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListInterconnectsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InterconnectList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListInterconnectsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InterconnectList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Interconnect]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/__init__.py new file mode 100644 index 000000000..e9c967559 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InterconnectsTransport +from .rest import InterconnectsRestTransport +from .rest import InterconnectsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InterconnectsTransport]] +_transport_registry['rest'] = InterconnectsRestTransport + +__all__ = ( + 'InterconnectsTransport', + 'InterconnectsRestTransport', + 'InterconnectsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/base.py new file mode 100644 index 000000000..615da5c9a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class InterconnectsTransport(abc.ABC): + """Abstract transport class for Interconnects.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_diagnostics: gapic_v1.method.wrap_method( + self.get_diagnostics, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteInterconnectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetInterconnectRequest], + Union[ + compute.Interconnect, + Awaitable[compute.Interconnect] + ]]: + raise NotImplementedError() + + @property + def get_diagnostics(self) -> Callable[ + [compute.GetDiagnosticsInterconnectRequest], + Union[ + compute.InterconnectsGetDiagnosticsResponse, + Awaitable[compute.InterconnectsGetDiagnosticsResponse] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertInterconnectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListInterconnectsRequest], + Union[ + compute.InterconnectList, + Awaitable[compute.InterconnectList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchInterconnectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsInterconnectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'InterconnectsTransport', +) diff --git 
a/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/rest.py new file mode 100644 index 000000000..2b7086312 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/interconnects/transports/rest.py @@ -0,0 +1,1052 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import 
InterconnectsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class InterconnectsRestInterceptor: + """Interceptor for Interconnects. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InterconnectsRestTransport. + + .. code-block:: python + class MyCustomInterconnectsInterceptor(InterconnectsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_diagnostics(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_diagnostics(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InterconnectsRestTransport(interceptor=MyCustomInterconnectsInterceptor()) + client = InterconnectsClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteInterconnectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteInterconnectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Interconnects server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Interconnects server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetInterconnectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetInterconnectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Interconnects server. + """ + return request, metadata + + def post_get(self, response: compute.Interconnect) -> compute.Interconnect: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Interconnects server but before + it is returned to user code. 
+ """ + return response + def pre_get_diagnostics(self, request: compute.GetDiagnosticsInterconnectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetDiagnosticsInterconnectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_diagnostics + + Override in a subclass to manipulate the request or metadata + before they are sent to the Interconnects server. + """ + return request, metadata + + def post_get_diagnostics(self, response: compute.InterconnectsGetDiagnosticsResponse) -> compute.InterconnectsGetDiagnosticsResponse: + """Post-rpc interceptor for get_diagnostics + + Override in a subclass to manipulate the response + after it is returned by the Interconnects server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertInterconnectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertInterconnectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Interconnects server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Interconnects server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListInterconnectsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInterconnectsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Interconnects server. 
+ """ + return request, metadata + + def post_list(self, response: compute.InterconnectList) -> compute.InterconnectList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Interconnects server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchInterconnectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchInterconnectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the Interconnects server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the Interconnects server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsInterconnectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsInterconnectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the Interconnects server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the Interconnects server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InterconnectsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InterconnectsRestInterceptor + + +class InterconnectsRestTransport(InterconnectsTransport): + """REST backend transport for Interconnects. + + The Interconnects API. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InterconnectsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InterconnectsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(InterconnectsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: 
compute.DeleteInterconnectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteInterconnectRequest): + The request object. A request message for + Interconnects.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/interconnects/{interconnect}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteInterconnectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(InterconnectsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetInterconnectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Interconnect: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInterconnectRequest): + The request object. A request message for + Interconnects.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Interconnect: + Represents an Interconnect resource. + An Interconnect resource is a dedicated + connection between the Google Cloud + network and your on-premises network. + For more information, read the Dedicated + Interconnect Overview. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/interconnects/{interconnect}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInterconnectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Interconnect() + pb_resp = compute.Interconnect.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetDiagnostics(InterconnectsRestStub): + def __hash__(self): + return hash("GetDiagnostics") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetDiagnosticsInterconnectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InterconnectsGetDiagnosticsResponse: + r"""Call the get diagnostics method over HTTP. + + Args: + request (~.compute.GetDiagnosticsInterconnectRequest): + The request object. A request message for + Interconnects.GetDiagnostics. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectsGetDiagnosticsResponse: + Response for the + InterconnectsGetDiagnosticsRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/interconnects/{interconnect}/getDiagnostics', + }, + ] + request, metadata = self._interceptor.pre_get_diagnostics(request, metadata) + pb_request = compute.GetDiagnosticsInterconnectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectsGetDiagnosticsResponse() + pb_resp = compute.InterconnectsGetDiagnosticsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_diagnostics(resp) + return resp + + class _Insert(InterconnectsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertInterconnectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertInterconnectRequest): + The request object. A request message for + Interconnects.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/interconnects', + 'body': 'interconnect_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertInterconnectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(InterconnectsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInterconnectsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InterconnectList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInterconnectsRequest): + The request object. A request message for + Interconnects.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InterconnectList: + Response to the list request, and + contains a list of interconnects. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/interconnects', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInterconnectsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InterconnectList() + pb_resp = compute.InterconnectList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(InterconnectsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchInterconnectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchInterconnectRequest): + The request object. A request message for + Interconnects.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/interconnects/{interconnect}', + 'body': 'interconnect_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchInterconnectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetLabels(InterconnectsRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsInterconnectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsInterconnectRequest): + The request object. A request message for + Interconnects.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/interconnects/{resource}/setLabels', + 'body': 'global_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsInterconnectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteInterconnectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetInterconnectRequest], + compute.Interconnect]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_diagnostics(self) -> Callable[ + [compute.GetDiagnosticsInterconnectRequest], + compute.InterconnectsGetDiagnosticsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDiagnostics(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertInterconnectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListInterconnectsRequest], + compute.InterconnectList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchInterconnectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsInterconnectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InterconnectsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/__init__.py new file mode 100644 index 000000000..bf4a3fb3e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import LicenseCodesClient + +__all__ = ( + 'LicenseCodesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/client.py new file mode 100644 index 000000000..8c330bc4e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/client.py @@ -0,0 +1,627 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.types import compute +from .transports.base import LicenseCodesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import LicenseCodesRestTransport + + +class LicenseCodesClientMeta(type): + """Metaclass for the LicenseCodes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[LicenseCodesTransport]] + _transport_registry["rest"] = LicenseCodesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[LicenseCodesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class LicenseCodesClient(metaclass=LicenseCodesClientMeta):
+ """The LicenseCodes API."""
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "compute.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ LicenseCodesClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ LicenseCodesClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(
+ filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> LicenseCodesTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ LicenseCodesTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str, ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str, ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder, )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str,str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str, ) -> str:
+ """Returns a fully-qualified organization
string."""
+ return "organizations/{organization}".format(organization=organization, )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str,str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str, ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(project=project, )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str,str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str, ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str,str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LicenseCodesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the license codes client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, LicenseCodesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LicenseCodesTransport): + # transport is a LicenseCodesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get(self, + request: Optional[Union[compute.GetLicenseCodeRequest, dict]] = None, + *, + project: Optional[str] = None, + license_code: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.LicenseCode: + r"""Return a specified license code. License codes are mirrored + across all projects that have permissions to read the License + Code. *Caution* This resource is intended for use only by + third-party partners who are creating Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.LicenseCodesClient() + + # Initialize request argument(s) + request = compute_v1.GetLicenseCodeRequest( + license_code="license_code_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetLicenseCodeRequest, dict]): + The request object. A request message for + LicenseCodes.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + license_code (str): + Number corresponding to the License + code resource to return. + + This corresponds to the ``license_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.LicenseCode: + Represents a License Code resource. A License Code is a + unique identifier used to represent a license resource. + *Caution* This resource is intended for use only by + third-party partners who are creating Cloud Marketplace + images. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, license_code]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetLicenseCodeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetLicenseCodeRequest): + request = compute.GetLicenseCodeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if license_code is not None: + request.license_code = license_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("license_code", request.license_code), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsLicenseCodeRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the specified resource. 
+ *Caution* This resource is intended for use only by third-party + partners who are creating Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.LicenseCodesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsLicenseCodeRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsLicenseCodeRequest, dict]): + The request object. A request message for + LicenseCodes.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsLicenseCodeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsLicenseCodeRequest): + request = compute.TestIamPermissionsLicenseCodeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "LicenseCodesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LicenseCodesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/__init__.py new file mode 100644 index 000000000..e98713b2c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LicenseCodesTransport +from .rest import LicenseCodesRestTransport +from .rest import LicenseCodesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[LicenseCodesTransport]] +_transport_registry['rest'] = LicenseCodesRestTransport + +__all__ = ( + 'LicenseCodesTransport', + 'LicenseCodesRestTransport', + 'LicenseCodesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/base.py new file mode 100644 index 000000000..c2ea03dc8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/base.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class LicenseCodesTransport(abc.ABC): + """Abstract transport class for LicenseCodes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetLicenseCodeRequest], + Union[ + compute.LicenseCode, + Awaitable[compute.LicenseCode] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsLicenseCodeRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'LicenseCodesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/rest.py new file mode 100644 index 000000000..367250aa4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/license_codes/transports/rest.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import LicenseCodesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LicenseCodesRestInterceptor: + """Interceptor for LicenseCodes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LicenseCodesRestTransport. + + .. 
code-block:: python + class MyCustomLicenseCodesInterceptor(LicenseCodesRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LicenseCodesRestTransport(interceptor=MyCustomLicenseCodesInterceptor()) + client = LicenseCodesClient(transport=transport) + + + """ + def pre_get(self, request: compute.GetLicenseCodeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetLicenseCodeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseCodes server. + """ + return request, metadata + + def post_get(self, response: compute.LicenseCode) -> compute.LicenseCode: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the LicenseCodes server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsLicenseCodeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsLicenseCodeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the LicenseCodes server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the LicenseCodes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LicenseCodesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LicenseCodesRestInterceptor + + +class LicenseCodesRestTransport(LicenseCodesTransport): + """REST backend transport for LicenseCodes. + + The LicenseCodes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[LicenseCodesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or LicenseCodesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Get(LicenseCodesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetLicenseCodeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.LicenseCode: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetLicenseCodeRequest): + The request object. A request message for + LicenseCodes.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.LicenseCode: + Represents a License Code resource. 
A License Code is a + unique identifier used to represent a license resource. + *Caution* This resource is intended for use only by + third-party partners who are creating Cloud Marketplace + images. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/licenseCodes/{license_code}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetLicenseCodeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.LicenseCode() + pb_resp = compute.LicenseCode.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _TestIamPermissions(LicenseCodesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsLicenseCodeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsLicenseCodeRequest): + The request object. A request message for + LicenseCodes.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/licenseCodes/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsLicenseCodeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def get(self) -> Callable[ + [compute.GetLicenseCodeRequest], + compute.LicenseCode]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsLicenseCodeRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'LicenseCodesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/__init__.py new file mode 100644 index 000000000..8f9d2e2ac --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import LicensesClient + +__all__ = ( + 'LicensesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/client.py new file mode 100644 index 000000000..03f3d0820 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/client.py @@ -0,0 +1,1557 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.licenses import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import LicensesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import LicensesRestTransport + + +class LicensesClientMeta(type): + """Metaclass for the Licenses client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[LicensesTransport]] + _transport_registry["rest"] = LicensesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[LicensesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+ + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class LicensesClient(metaclass=LicensesClientMeta): + """The Licenses API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicensesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicensesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LicensesTransport: + """Returns the transport used by the client instance. + + Returns: + LicensesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + 
return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LicensesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the licenses client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, LicensesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LicensesTransport): + # transport is a LicensesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteLicenseRequest, dict]] = None, + *, + project: Optional[str] = None, + license_: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified license. *Caution* This resource is + intended for use only by third-party partners who are creating + Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteLicenseRequest( + license_="license__value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteLicenseRequest, dict]): + The request object. A request message for + Licenses.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + license_ (str): + Name of the license resource to + delete. + + This corresponds to the ``license_`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, license_]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteLicenseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteLicenseRequest): + request = compute.DeleteLicenseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if license_ is not None: + request.license_ = license_ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("license", request.license_), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteLicenseRequest, dict]] = None, + *, + project: Optional[str] = None, + license_: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified license. *Caution* This resource is + intended for use only by third-party partners who are creating + Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteLicenseRequest( + license_="license__value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteLicenseRequest, dict]): + The request object. A request message for + Licenses.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + license_ (str): + Name of the license resource to + delete. + + This corresponds to the ``license_`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, license_]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteLicenseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteLicenseRequest): + request = compute.DeleteLicenseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if license_ is not None: + request.license_ = license_ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("license", request.license_), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetLicenseRequest, dict]] = None, + *, + project: Optional[str] = None, + license_: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.License: + r"""Returns the specified License resource. *Caution* This resource + is intended for use only by third-party partners who are + creating Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.GetLicenseRequest( + license_="license__value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetLicenseRequest, dict]): + The request object. A request message for Licenses.Get. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + license_ (str): + Name of the License resource to + return. + + This corresponds to the ``license_`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.License: + Represents a License resource. A License represents + billing and aggregate usage data for public and + marketplace images. *Caution* This resource is intended + for use only by third-party partners who are creating + Cloud Marketplace images. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, license_]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetLicenseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetLicenseRequest): + request = compute.GetLicenseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if license_ is not None: + request.license_ = license_ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("license", request.license_), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyLicenseRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be empty if + no such policy or resource exists. *Caution* This resource is + intended for use only by third-party partners who are creating + Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyLicenseRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyLicenseRequest, dict]): + The request object. 
A request message for + Licenses.GetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyLicenseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicyLicenseRequest): + request = compute.GetIamPolicyLicenseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertLicenseRequest, dict]] = None, + *, + project: Optional[str] = None, + license_resource: Optional[compute.License] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Create a License resource in the specified project. *Caution* + This resource is intended for use only by third-party partners + who are creating Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_insert():
+                # Create a client
+                client = compute_v1.LicensesClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.InsertLicenseRequest(
+                    project="project_value",
+                )
+
+                # Make the request
+                response = client.insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.InsertLicenseRequest, dict]):
+                The request object. A request message for
+                Licenses.Insert. See the method
+                description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            license_resource (google.cloud.compute_v1.types.License):
+                The body resource for this request
+                This corresponds to the ``license_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, license_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertLicenseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertLicenseRequest): + request = compute.InsertLicenseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if license_resource is not None: + request.license_resource = license_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertLicenseRequest, dict]] = None, + *, + project: Optional[str] = None, + license_resource: Optional[compute.License] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Create a License resource in the specified project. *Caution* + This resource is intended for use only by third-party partners + who are creating Cloud Marketplace images. + + .. 
code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_insert():
+                # Create a client
+                client = compute_v1.LicensesClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.InsertLicenseRequest(
+                    project="project_value",
+                )
+
+                # Make the request
+                response = client.insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.InsertLicenseRequest, dict]):
+                The request object. A request message for
+                Licenses.Insert. See the method
+                description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            license_resource (google.cloud.compute_v1.types.License):
+                The body resource for this request
+                This corresponds to the ``license_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, license_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertLicenseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertLicenseRequest): + request = compute.InsertLicenseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if license_resource is not None: + request.license_resource = license_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListLicensesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of licenses available in the specified + project. This method does not get any licenses that belong to + other projects, including licenses attached to + publicly-available images, like Debian 9. If you want to get a + list of publicly-available licenses, use this method to make a + request to the respective image project, such as debian-cloud or + windows-cloud. *Caution* This resource is intended for use only + by third-party partners who are creating Cloud Marketplace + images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.ListLicensesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListLicensesRequest, dict]): + The request object. A request message for Licenses.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.licenses.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListLicensesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.ListLicensesRequest): + request = compute.ListLicensesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyLicenseRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_policy_request_resource: Optional[compute.GlobalSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified resource. + Replaces any existing policy. *Caution* This resource is + intended for use only by third-party partners who are creating + Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyLicenseRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyLicenseRequest, dict]): + The request object. A request message for + Licenses.SetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. 
+ Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyLicenseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicyLicenseRequest): + request = compute.SetIamPolicyLicenseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_policy_request_resource is not None: + request.global_set_policy_request_resource = global_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsLicenseRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the specified resource. + *Caution* This resource is intended for use only by third-party + partners who are creating Cloud Marketplace images. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsLicenseRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsLicenseRequest, dict]): + The request object. A request message for + Licenses.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. 
+ + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsLicenseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsLicenseRequest): + request = compute.TestIamPermissionsLicenseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "LicensesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LicensesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/pagers.py new file mode 100644 index 000000000..de25609c1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.LicensesListResponse` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.LicensesListResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.LicensesListResponse], + request: compute.ListLicensesRequest, + response: compute.LicensesListResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListLicensesRequest): + The initial request object. + response (google.cloud.compute_v1.types.LicensesListResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListLicensesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.LicensesListResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.License]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/__init__.py new file mode 100644 index 000000000..1843e548f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LicensesTransport +from .rest import LicensesRestTransport +from .rest import LicensesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[LicensesTransport]] +_transport_registry['rest'] = LicensesRestTransport + +__all__ = ( + 'LicensesTransport', + 'LicensesRestTransport', + 'LicensesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/base.py new file mode 100644 index 000000000..593086f1c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class LicensesTransport(abc.ABC): + """Abstract transport class for Licenses.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteLicenseRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetLicenseRequest], + Union[ + compute.License, + Awaitable[compute.License] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyLicenseRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertLicenseRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListLicensesRequest], + Union[ + compute.LicensesListResponse, + Awaitable[compute.LicensesListResponse] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyLicenseRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsLicenseRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'LicensesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/rest.py 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/rest.py new file mode 100644 index 000000000..60cf0f76e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/licenses/transports/rest.py @@ -0,0 +1,1097 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import LicensesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LicensesRestInterceptor: + """Interceptor for Licenses. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LicensesRestTransport. + + .. code-block:: python + class MyCustomLicensesInterceptor(LicensesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LicensesRestTransport(interceptor=MyCustomLicensesInterceptor()) + client = LicensesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteLicenseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteLicenseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Licenses server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Licenses server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetLicenseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetLicenseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Licenses server. + """ + return request, metadata + + def post_get(self, response: compute.License) -> compute.License: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Licenses server but before + it is returned to user code. 
+ """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyLicenseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyLicenseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Licenses server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Licenses server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertLicenseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertLicenseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Licenses server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Licenses server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListLicensesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListLicensesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Licenses server. + """ + return request, metadata + + def post_list(self, response: compute.LicensesListResponse) -> compute.LicensesListResponse: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Licenses server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyLicenseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyLicenseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Licenses server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Licenses server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsLicenseRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsLicenseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Licenses server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Licenses server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LicensesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LicensesRestInterceptor + + +class LicensesRestTransport(LicensesTransport): + """REST backend transport for Licenses. + + The Licenses API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[LicensesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or LicensesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(LicensesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "license" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteLicenseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteLicenseRequest): + The request object.
A request message for + Licenses.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/licenses/{license_}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteLicenseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the
appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(LicensesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "license" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetLicenseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.License: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetLicenseRequest): + The request object. A request message for Licenses.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.License: + Represents a License resource. A License represents + billing and aggregate usage data for public and + marketplace images. *Caution* This resource is intended + for use only by third-party partners who are creating + Cloud Marketplace images. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/licenses/{license_}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetLicenseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.License() + pb_resp = compute.License.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(LicensesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyLicenseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. 
+ + Args: + request (~.compute.GetIamPolicyLicenseRequest): + The request object. A request message for + Licenses.GetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/licenses/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyLicenseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(LicensesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertLicenseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertLicenseRequest): + The request object. A request message for + Licenses.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/licenses', + 'body': 'license_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertLicenseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(LicensesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListLicensesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.LicensesListResponse: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListLicensesRequest): + The request object. A request message for Licenses.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.LicensesListResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/licenses', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListLicensesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.LicensesListResponse() + pb_resp = compute.LicensesListResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetIamPolicy(LicensesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyLicenseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyLicenseRequest): + The request object. A request message for + Licenses.SetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/licenses/{resource}/setIamPolicy', + 'body': 'global_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyLicenseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(LicensesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsLicenseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsLicenseRequest): + The request object. A request message for + Licenses.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/licenses/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsLicenseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteLicenseRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetLicenseRequest], + compute.License]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyLicenseRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertLicenseRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListLicensesRequest], + compute.LicensesListResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyLicenseRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsLicenseRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'LicensesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/__init__.py new file mode 100644 index 000000000..32bc13a27 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import MachineImagesClient + +__all__ = ( + 'MachineImagesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/client.py new file mode 100644 index 000000000..591c5f60c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/client.py @@ -0,0 +1,1552 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.machine_images import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import MachineImagesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import MachineImagesRestTransport + + +class MachineImagesClientMeta(type): + """Metaclass for the MachineImages client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MachineImagesTransport]] + _transport_registry["rest"] = MachineImagesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[MachineImagesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MachineImagesClient(metaclass=MachineImagesClientMeta): + """The MachineImages API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MachineImagesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MachineImagesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MachineImagesTransport: + """Returns the transport used by the client instance. + + Returns: + MachineImagesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization 
string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MachineImagesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the machine images client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MachineImagesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MachineImagesTransport): + # transport is a MachineImagesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteMachineImageRequest, dict]] = None, + *, + project: Optional[str] = None, + machine_image: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified machine image. Deleting a + machine image is permanent and cannot be undone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteMachineImageRequest( + machine_image="machine_image_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteMachineImageRequest, dict]): + The request object. A request message for + MachineImages.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + machine_image (str): + The name of the machine image to + delete. + + This corresponds to the ``machine_image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, machine_image]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteMachineImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteMachineImageRequest): + request = compute.DeleteMachineImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if machine_image is not None: + request.machine_image = machine_image + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("machine_image", request.machine_image), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteMachineImageRequest, dict]] = None, + *, + project: Optional[str] = None, + machine_image: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified machine image. Deleting a + machine image is permanent and cannot be undone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteMachineImageRequest( + machine_image="machine_image_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteMachineImageRequest, dict]): + The request object. A request message for + MachineImages.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + machine_image (str): + The name of the machine image to + delete. + + This corresponds to the ``machine_image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, machine_image]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteMachineImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteMachineImageRequest): + request = compute.DeleteMachineImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if machine_image is not None: + request.machine_image = machine_image + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("machine_image", request.machine_image), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetMachineImageRequest, dict]] = None, + *, + project: Optional[str] = None, + machine_image: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.MachineImage: + r"""Returns the specified machine image. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetMachineImageRequest( + machine_image="machine_image_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetMachineImageRequest, dict]): + The request object. A request message for + MachineImages.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ machine_image (str): + The name of the machine image. + This corresponds to the ``machine_image`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.MachineImage: + Represents a machine image resource. + A machine image is a Compute Engine + resource that stores all the + configuration, metadata, permissions, + and data from one or more disks required + to create a Virtual machine (VM) + instance. For more information, see + Machine images. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, machine_image]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetMachineImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetMachineImageRequest): + request = compute.GetMachineImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if machine_image is not None: + request.machine_image = machine_image + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("machine_image", request.machine_image), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyMachineImageRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyMachineImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyMachineImageRequest, dict]): + The request object. A request message for + MachineImages.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyMachineImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicyMachineImageRequest): + request = compute.GetIamPolicyMachineImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertMachineImageRequest, dict]] = None, + *, + project: Optional[str] = None, + machine_image_resource: Optional[compute.MachineImage] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a machine image in the specified project + using the data that is included in the request. If you + are creating a new machine image to update an existing + instance, your new machine image should use the same + network or, if applicable, the same subnetwork as the + original instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.InsertMachineImageRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertMachineImageRequest, dict]): + The request object. A request message for + MachineImages.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + machine_image_resource (google.cloud.compute_v1.types.MachineImage): + The body resource for this request + This corresponds to the ``machine_image_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, machine_image_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertMachineImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertMachineImageRequest): + request = compute.InsertMachineImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if machine_image_resource is not None: + request.machine_image_resource = machine_image_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertMachineImageRequest, dict]] = None, + *, + project: Optional[str] = None, + machine_image_resource: Optional[compute.MachineImage] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a machine image in the specified project + using the data that is included in the request. 
If you + are creating a new machine image to update an existing + instance, your new machine image should use the same + network or, if applicable, the same subnetwork as the + original instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.InsertMachineImageRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertMachineImageRequest, dict]): + The request object. A request message for + MachineImages.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + machine_image_resource (google.cloud.compute_v1.types.MachineImage): + The body resource for this request + This corresponds to the ``machine_image_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, machine_image_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertMachineImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertMachineImageRequest): + request = compute.InsertMachineImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if machine_image_resource is not None: + request.machine_image_resource = machine_image_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListMachineImagesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of machine images that are contained + within the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.ListMachineImagesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListMachineImagesRequest, dict]): + The request object. A request message for + MachineImages.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.machine_images.pagers.ListPager: + A list of machine images. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListMachineImagesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListMachineImagesRequest): + request = compute.ListMachineImagesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyMachineImageRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_policy_request_resource: Optional[compute.GlobalSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyMachineImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyMachineImageRequest, dict]): + The request object. A request message for + MachineImages.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. 
+ Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyMachineImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicyMachineImageRequest): + request = compute.SetIamPolicyMachineImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_policy_request_resource is not None: + request.global_set_policy_request_resource = global_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsMachineImageRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsMachineImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsMachineImageRequest, dict]): + The request object. A request message for + MachineImages.TestIamPermissions. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. 
+ + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsMachineImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsMachineImageRequest): + request = compute.TestIamPermissionsMachineImageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MachineImagesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MachineImagesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/pagers.py new file mode 100644 index 000000000..675020685 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.MachineImageList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.MachineImageList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.MachineImageList], + request: compute.ListMachineImagesRequest, + response: compute.MachineImageList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListMachineImagesRequest): + The initial request object. + response (google.cloud.compute_v1.types.MachineImageList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListMachineImagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.MachineImageList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.MachineImage]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/__init__.py new file mode 100644 index 000000000..f66a316e0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MachineImagesTransport +from .rest import MachineImagesRestTransport +from .rest import MachineImagesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[MachineImagesTransport]] +_transport_registry['rest'] = MachineImagesRestTransport + +__all__ = ( + 'MachineImagesTransport', + 'MachineImagesRestTransport', + 'MachineImagesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/base.py new file mode 100644 index 000000000..9b55d583d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class MachineImagesTransport(abc.ABC): + """Abstract transport class for MachineImages.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteMachineImageRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetMachineImageRequest], + Union[ + compute.MachineImage, + Awaitable[compute.MachineImage] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyMachineImageRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertMachineImageRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListMachineImagesRequest], + Union[ + compute.MachineImageList, + Awaitable[compute.MachineImageList] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyMachineImageRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsMachineImageRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'MachineImagesTransport', +) diff --git 
a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/rest.py new file mode 100644 index 000000000..3774a858b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_images/transports/rest.py @@ -0,0 +1,1102 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import 
MachineImagesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MachineImagesRestInterceptor: + """Interceptor for MachineImages. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MachineImagesRestTransport. + + .. code-block:: python + class MyCustomMachineImagesInterceptor(MachineImagesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MachineImagesRestTransport(interceptor=MyCustomMachineImagesInterceptor()) + client = MachineImagesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteMachineImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteMachineImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineImages server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the MachineImages server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetMachineImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetMachineImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineImages server. + """ + return request, metadata + + def post_get(self, response: compute.MachineImage) -> compute.MachineImage: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the MachineImages server but before + it is returned to user code. 
+ """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyMachineImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyMachineImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineImages server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the MachineImages server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertMachineImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertMachineImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineImages server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the MachineImages server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListMachineImagesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListMachineImagesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineImages server. + """ + return request, metadata + + def post_list(self, response: compute.MachineImageList) -> compute.MachineImageList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the MachineImages server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyMachineImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyMachineImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineImages server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the MachineImages server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsMachineImageRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsMachineImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineImages server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the MachineImages server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MachineImagesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MachineImagesRestInterceptor + + +class MachineImagesRestTransport(MachineImagesTransport): + """REST backend transport for MachineImages. + + The MachineImages API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[MachineImagesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MachineImagesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(MachineImagesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteMachineImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteMachineImageRequest): + The request object. A request message for + MachineImages.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/machineImages/{machine_image}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteMachineImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(MachineImagesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetMachineImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.MachineImage: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetMachineImageRequest): + The request object. A request message for + MachineImages.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.MachineImage: + Represents a machine image resource. + A machine image is a Compute Engine + resource that stores all the + configuration, metadata, permissions, + and data from one or more disks required + to create a Virtual machine (VM) + instance. For more information, see + Machine images. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/machineImages/{machine_image}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetMachineImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.MachineImage() + pb_resp = compute.MachineImage.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(MachineImagesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyMachineImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyMachineImageRequest): + The request object. A request message for + MachineImages.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/machineImages/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyMachineImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(MachineImagesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertMachineImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertMachineImageRequest): + The request object. A request message for + MachineImages.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/machineImages', + 'body': 'machine_image_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertMachineImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(MachineImagesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListMachineImagesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.MachineImageList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListMachineImagesRequest): + The request object. A request message for + MachineImages.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.MachineImageList: + A list of machine images. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/machineImages', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListMachineImagesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.MachineImageList() + pb_resp = compute.MachineImageList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetIamPolicy(MachineImagesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyMachineImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyMachineImageRequest): + The request object. A request message for + MachineImages.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/machineImages/{resource}/setIamPolicy', + 'body': 'global_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyMachineImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(MachineImagesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsMachineImageRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsMachineImageRequest): + The request object. A request message for + MachineImages.TestIamPermissions. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/machineImages/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsMachineImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteMachineImageRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetMachineImageRequest], + compute.MachineImage]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyMachineImageRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertMachineImageRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListMachineImagesRequest], + compute.MachineImageList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyMachineImageRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsMachineImageRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'MachineImagesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/__init__.py new file mode 100644 index 000000000..7b4b06e73 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import MachineTypesClient + +__all__ = ( + 'MachineTypesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/client.py new file mode 100644 index 000000000..aec454d6e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/client.py @@ -0,0 +1,752 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.machine_types import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import MachineTypesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import MachineTypesRestTransport + + +class MachineTypesClientMeta(type): + """Metaclass for the MachineTypes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MachineTypesTransport]] + _transport_registry["rest"] = MachineTypesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[MachineTypesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+ if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MachineTypesClient(metaclass=MachineTypesClientMeta): + """The MachineTypes API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MachineTypesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MachineTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MachineTypesTransport: + """Returns the transport used by the client instance. + + Returns: + MachineTypesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization 
string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MachineTypesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the machine types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MachineTypesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MachineTypesTransport): + # transport is a MachineTypesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListMachineTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of machine types. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.MachineTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListMachineTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListMachineTypesRequest, dict]): + The request object. A request message for + MachineTypes.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.machine_types.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListMachineTypesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListMachineTypesRequest): + request = compute.AggregatedListMachineTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetMachineTypeRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + machine_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.MachineType: + r"""Returns the specified machine type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.MachineTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetMachineTypeRequest( + machine_type="machine_type_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetMachineTypeRequest, dict]): + The request object. A request message for + MachineTypes.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + machine_type (str): + Name of the machine type to return. + This corresponds to the ``machine_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.MachineType: + Represents a Machine Type resource. + You can use specific machine types for + your VM instances based on performance + and pricing requirements. For more + information, read Machine Types. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, machine_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetMachineTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetMachineTypeRequest): + request = compute.GetMachineTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if machine_type is not None: + request.machine_type = machine_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("machine_type", request.machine_type), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListMachineTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of machine types available to the + specified project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.MachineTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListMachineTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListMachineTypesRequest, dict]): + The request object. A request message for + MachineTypes.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.machine_types.pagers.ListPager: + Contains a list of machine types. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListMachineTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListMachineTypesRequest): + request = compute.ListMachineTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MachineTypesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MachineTypesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/pagers.py new file mode 100644 index 000000000..a87e58b8d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.MachineTypeAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.MachineTypeAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.MachineTypeAggregatedList], + request: compute.AggregatedListMachineTypesRequest, + response: compute.MachineTypeAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListMachineTypesRequest): + The initial request object. + response (google.cloud.compute_v1.types.MachineTypeAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListMachineTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.MachineTypeAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.MachineTypesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.MachineTypesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.MachineTypeList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.MachineTypeList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.MachineTypeList], + request: compute.ListMachineTypesRequest, + response: compute.MachineTypeList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListMachineTypesRequest): + The initial request object. + response (google.cloud.compute_v1.types.MachineTypeList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListMachineTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.MachineTypeList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.MachineType]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/__init__.py new file mode 100644 index 000000000..ebed8c8e5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MachineTypesTransport +from .rest import MachineTypesRestTransport +from .rest import MachineTypesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[MachineTypesTransport]] +_transport_registry['rest'] = MachineTypesRestTransport + +__all__ = ( + 'MachineTypesTransport', + 'MachineTypesRestTransport', + 'MachineTypesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/base.py new file mode 100644 index 000000000..bf5fd07a9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/base.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class MachineTypesTransport(abc.ABC): + """Abstract transport class for MachineTypes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListMachineTypesRequest], + Union[ + compute.MachineTypeAggregatedList, + Awaitable[compute.MachineTypeAggregatedList] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetMachineTypeRequest], + Union[ + compute.MachineType, + Awaitable[compute.MachineType] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListMachineTypesRequest], + Union[ + compute.MachineTypeList, + Awaitable[compute.MachineTypeList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'MachineTypesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/rest.py new file mode 100644 index 000000000..18e2e0386 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/machine_types/transports/rest.py @@ -0,0 +1,520 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import MachineTypesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MachineTypesRestInterceptor: + """Interceptor for MachineTypes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MachineTypesRestTransport. + + .. code-block:: python + class MyCustomMachineTypesInterceptor(MachineTypesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MachineTypesRestTransport(interceptor=MyCustomMachineTypesInterceptor()) + client = MachineTypesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListMachineTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListMachineTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineTypes server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.MachineTypeAggregatedList) -> compute.MachineTypeAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the MachineTypes server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetMachineTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetMachineTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineTypes server. + """ + return request, metadata + + def post_get(self, response: compute.MachineType) -> compute.MachineType: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the MachineTypes server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListMachineTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListMachineTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the MachineTypes server. + """ + return request, metadata + + def post_list(self, response: compute.MachineTypeList) -> compute.MachineTypeList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the MachineTypes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MachineTypesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MachineTypesRestInterceptor + + +class MachineTypesRestTransport(MachineTypesTransport): + """REST backend transport for MachineTypes. + + The MachineTypes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[MachineTypesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MachineTypesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(MachineTypesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListMachineTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.MachineTypeAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListMachineTypesRequest): + The request object. 
A request message for + MachineTypes.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.MachineTypeAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/machineTypes', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListMachineTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.MachineTypeAggregatedList() + pb_resp = compute.MachineTypeAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Get(MachineTypesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetMachineTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.MachineType: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetMachineTypeRequest): + The request object. A request message for + MachineTypes.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.MachineType: + Represents a Machine Type resource. + You can use specific machine types for + your VM instances based on performance + and pricing requirements. For more + information, read Machine Types. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/machineTypes/{machine_type}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetMachineTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.MachineType() + pb_resp = compute.MachineType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(MachineTypesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListMachineTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.MachineTypeList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListMachineTypesRequest): + The request object. A request message for + MachineTypes.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.MachineTypeList: + Contains a list of machine types. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/machineTypes', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListMachineTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.MachineTypeList() + pb_resp = compute.MachineTypeList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListMachineTypesRequest], + compute.MachineTypeAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetMachineTypeRequest], + compute.MachineType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListMachineTypesRequest], + compute.MachineTypeList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'MachineTypesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/__init__.py new file mode 100644 index 000000000..8ff24f676 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import NetworkAttachmentsClient + +__all__ = ( + 'NetworkAttachmentsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/client.py new file mode 100644 index 000000000..d79a6f88d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/client.py @@ -0,0 +1,1754 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.network_attachments import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import NetworkAttachmentsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import NetworkAttachmentsRestTransport + + +class NetworkAttachmentsClientMeta(type): + """Metaclass for the NetworkAttachments client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[NetworkAttachmentsTransport]] + _transport_registry["rest"] = NetworkAttachmentsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[NetworkAttachmentsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class NetworkAttachmentsClient(metaclass=NetworkAttachmentsClientMeta): + """The NetworkAttachments API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NetworkAttachmentsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NetworkAttachmentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NetworkAttachmentsTransport: + """Returns the transport used by the client instance. + + Returns: + NetworkAttachmentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, NetworkAttachmentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the network attachments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, NetworkAttachmentsTransport]): The + transport to use. 
def __init__(self, *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Optional[Union[str, NetworkAttachmentsTransport]] = None,
        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        ) -> None:
    """Instantiates the network attachments client.

    Args:
        credentials (Optional[google.auth.credentials.Credentials]): The
            authorization credentials to attach to requests. If none are
            given, the client falls back to ambient credentials.
        transport (Union[str, NetworkAttachmentsTransport]): The
            transport to use, or None to choose one automatically.
            NOTE: "rest" transport functionality is currently in a
            beta state (preview).
        client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
            Custom options for the client; ignored when a ``transport``
            instance is provided. ``api_endpoint`` overrides the default
            endpoint (also controllable via GOOGLE_API_USE_MTLS_ENDPOINT:
            "always"/"never"/"auto"); ``client_cert_source`` supplies a
            client certificate when GOOGLE_API_USE_CLIENT_CERTIFICATE is
            "true".
        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
            Client info used for the user-agent string; ``None`` means
            default info.

    Raises:
        google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
            creation failed for any reason.
    """
    # Normalize client_options into a ClientOptions instance.
    if isinstance(client_options, dict):
        client_options = client_options_lib.from_dict(client_options)
    if client_options is None:
        client_options = client_options_lib.ClientOptions()
    client_options = cast(client_options_lib.ClientOptions, client_options)

    api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

    # API keys and explicit credentials cannot both be supplied.
    api_key_value = getattr(client_options, "api_key", None)
    if api_key_value and credentials:
        raise ValueError("client_options.api_key and credentials are mutually exclusive")

    if isinstance(transport, NetworkAttachmentsTransport):
        # A ready-made transport carries its own credentials/scopes, so
        # reject any conflicting settings rather than silently ignoring them.
        if credentials or client_options.credentials_file or api_key_value:
            raise ValueError("When providing a transport instance, "
                             "provide its credentials directly.")
        if client_options.scopes:
            raise ValueError(
                "When providing a transport instance, provide its scopes "
                "directly."
            )
        self._transport = transport
    else:
        import google.auth._default  # type: ignore

        # Translate an API key into credentials when supported.
        if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
            credentials = google.auth._default.get_api_key_credentials(api_key_value)

        Transport = type(self).get_transport_class(transport)
        self._transport = Transport(
            credentials=credentials,
            credentials_file=client_options.credentials_file,
            host=api_endpoint,
            scopes=client_options.scopes,
            client_cert_source_for_mtls=client_cert_source_func,
            quota_project_id=client_options.quota_project_id,
            client_info=client_info,
            always_use_jwt_access=True,
            api_audience=client_options.api_audience,
        )
def aggregated_list(self,
        request: Optional[Union[compute.AggregatedListNetworkAttachmentsRequest, dict]] = None,
        *,
        project: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> pagers.AggregatedListPager:
    r"""Retrieves the list of all NetworkAttachment
    resources, regional and global, available to the
    specified project.

    Args:
        request (Union[google.cloud.compute_v1.types.AggregatedListNetworkAttachmentsRequest, dict]):
            The request object. A request message for
            NetworkAttachments.AggregatedList. See
            the method description for details.
        project (str):
            Project ID for this request. Mutually exclusive with
            ``request``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.compute_v1.services.network_attachments.pagers.AggregatedListPager:
            Contains a list of NetworkAttachmentsScopedList. Iterating
            over this object will yield results and resolve additional
            pages automatically.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    flattened = any([project])
    if request is not None and flattened:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dicts (or None) into a proto request; skip the copy when the
    # caller already passed the right type.
    if not isinstance(request, compute.AggregatedListNetworkAttachmentsRequest):
        request = compute.AggregatedListNetworkAttachmentsRequest(request)
    if project is not None:
        request.project = project

    # The wrapped method carries retry/timeout defaults and error mapping.
    rpc = self._transport._wrapped_methods[self._transport.aggregated_list]

    # Routing header: the server routes on the project field.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("project", request.project),
        )),
    )

    first_page = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Wrap in a pager so iteration transparently fetches further pages.
    return pagers.AggregatedListPager(
        method=rpc,
        request=request,
        response=first_page,
        metadata=metadata,
    )
def delete_unary(self,
        request: Optional[Union[compute.DeleteNetworkAttachmentRequest, dict]] = None,
        *,
        project: Optional[str] = None,
        region: Optional[str] = None,
        network_attachment: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> compute.Operation:
    r"""Deletes the specified NetworkAttachment in the given
    scope. Returns the raw Operation proto without polling.

    Args:
        request (Union[google.cloud.compute_v1.types.DeleteNetworkAttachmentRequest, dict]):
            The request object. A request message for
            NetworkAttachments.Delete. See the
            method description for details.
        project (str):
            Project ID for this request. Mutually exclusive with
            ``request``.
        region (str):
            Name of the region of this request. Mutually exclusive
            with ``request``.
        network_attachment (str):
            Name of the NetworkAttachment resource to delete.
            Mutually exclusive with ``request``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.api_core.extended_operation.ExtendedOperation:
            An object representing a extended long-running operation.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    if request is not None and any([project, region, network_attachment]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dicts (or None) into a proto request, then fold in any
    # flattened fields that were supplied.
    if not isinstance(request, compute.DeleteNetworkAttachmentRequest):
        request = compute.DeleteNetworkAttachmentRequest(request)
    for field_name, value in (("project", project),
                              ("region", region),
                              ("network_attachment", network_attachment)):
        if value is not None:
            setattr(request, field_name, value)

    # The wrapped method carries retry/timeout defaults and error mapping.
    rpc = self._transport._wrapped_methods[self._transport.delete]

    # Routing headers: the server routes on project/region/resource.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("project", request.project),
            ("region", request.region),
            ("network_attachment", request.network_attachment),
        )),
    )

    return rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def delete(self,
        request: Optional[Union[compute.DeleteNetworkAttachmentRequest, dict]] = None,
        *,
        project: Optional[str] = None,
        region: Optional[str] = None,
        network_attachment: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> extended_operation.ExtendedOperation:
    r"""Deletes the specified NetworkAttachment in the given
    scope. Returns an ExtendedOperation that can be polled to
    completion.

    Args:
        request (Union[google.cloud.compute_v1.types.DeleteNetworkAttachmentRequest, dict]):
            The request object. A request message for
            NetworkAttachments.Delete. See the
            method description for details.
        project (str):
            Project ID for this request. Mutually exclusive with
            ``request``.
        region (str):
            Name of the region of this request. Mutually exclusive
            with ``request``.
        network_attachment (str):
            Name of the NetworkAttachment resource to delete.
            Mutually exclusive with ``request``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.api_core.extended_operation.ExtendedOperation:
            An object representing a extended long-running operation.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    if request is not None and any([project, region, network_attachment]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dicts (or None) into a proto request, then fold in any
    # flattened fields that were supplied.
    if not isinstance(request, compute.DeleteNetworkAttachmentRequest):
        request = compute.DeleteNetworkAttachmentRequest(request)
    for field_name, value in (("project", project),
                              ("region", region),
                              ("network_attachment", network_attachment)):
        if value is not None:
            setattr(request, field_name, value)

    # The wrapped method carries retry/timeout defaults and error mapping.
    rpc = self._transport._wrapped_methods[self._transport.delete]

    # Routing headers: the server routes on project/region/resource.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("project", request.project),
            ("region", request.region),
            ("network_attachment", request.network_attachment),
        )),
    )

    raw_response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Build a poller against the region operations service so callers can
    # wait for the mutation to finish.
    operation_service = self._transport._region_operations_client
    operation_request = compute.GetRegionOperationRequest()
    operation_request.project = request.project
    operation_request.region = request.region
    operation_request.operation = raw_response.name

    get_operation = functools.partial(operation_service.get, operation_request)
    # Cancel is not part of extended operations yet.
    cancel_operation = lambda: None

    # Adapter exposing compute's HTTP-flavored error fields under the
    # uniform names ExtendedOperation expects.
    class _CustomOperation(extended_operation.ExtendedOperation):
        @property
        def error_message(self):
            return self._extended_operation.http_error_message

        @property
        def error_code(self):
            return self._extended_operation.http_error_status_code

    return _CustomOperation.make(get_operation, cancel_operation, raw_response)
def get(self,
        request: Optional[Union[compute.GetNetworkAttachmentRequest, dict]] = None,
        *,
        project: Optional[str] = None,
        region: Optional[str] = None,
        network_attachment: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> compute.NetworkAttachment:
    r"""Returns the specified NetworkAttachment resource in
    the given scope.

    Args:
        request (Union[google.cloud.compute_v1.types.GetNetworkAttachmentRequest, dict]):
            The request object. A request message for
            NetworkAttachments.Get. See the method
            description for details.
        project (str):
            Project ID for this request. Mutually exclusive with
            ``request``.
        region (str):
            Name of the region of this request. Mutually exclusive
            with ``request``.
        network_attachment (str):
            Name of the NetworkAttachment resource to return.
            Mutually exclusive with ``request``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.compute_v1.types.NetworkAttachment:
            NetworkAttachments A network attachment resource ...
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    if request is not None and any([project, region, network_attachment]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dicts (or None) into a proto request, then fold in any
    # flattened fields that were supplied.
    if not isinstance(request, compute.GetNetworkAttachmentRequest):
        request = compute.GetNetworkAttachmentRequest(request)
    for field_name, value in (("project", project),
                              ("region", region),
                              ("network_attachment", network_attachment)):
        if value is not None:
            setattr(request, field_name, value)

    # The wrapped method carries retry/timeout defaults and error mapping.
    rpc = self._transport._wrapped_methods[self._transport.get]

    # Routing headers: the server routes on project/region/resource.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("project", request.project),
            ("region", request.region),
            ("network_attachment", request.network_attachment),
        )),
    )

    return rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def get_iam_policy(self,
        request: Optional[Union[compute.GetIamPolicyNetworkAttachmentRequest, dict]] = None,
        *,
        project: Optional[str] = None,
        region: Optional[str] = None,
        resource: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> compute.Policy:
    r"""Gets the access control policy for a resource. May be
    empty if no such policy or resource exists.

    Args:
        request (Union[google.cloud.compute_v1.types.GetIamPolicyNetworkAttachmentRequest, dict]):
            The request object. A request message for
            NetworkAttachments.GetIamPolicy. See the
            method description for details.
        project (str):
            Project ID for this request. Mutually exclusive with
            ``request``.
        region (str):
            The name of the region for this request. Mutually
            exclusive with ``request``.
        resource (str):
            Name or id of the resource for this request. Mutually
            exclusive with ``request``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.compute_v1.types.Policy:
            An Identity and Access Management (IAM) policy, which
            specifies access controls for Google Cloud resources. A
            Policy is a collection of bindings binding one or more
            principals to a single role (IAM predefined or custom), and
            may carry a condition restricting when the binding applies.
            See the IAM documentation
            (https://cloud.google.com/iam/docs/) for details and
            JSON/YAML examples.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    if request is not None and any([project, region, resource]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce dicts (or None) into a proto request, then fold in any
    # flattened fields that were supplied.
    if not isinstance(request, compute.GetIamPolicyNetworkAttachmentRequest):
        request = compute.GetIamPolicyNetworkAttachmentRequest(request)
    for field_name, value in (("project", project),
                              ("region", region),
                              ("resource", resource)):
        if value is not None:
            setattr(request, field_name, value)

    # The wrapped method carries retry/timeout defaults and error mapping.
    rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]

    # Routing headers: the server routes on project/region/resource.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("project", request.project),
            ("region", request.region),
            ("resource", request.resource),
        )),
    )

    return rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkAttachmentRequest, dict]): + The request object. A request message for + NetworkAttachments.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_attachment_resource (google.cloud.compute_v1.types.NetworkAttachment): + The body resource for this request + This corresponds to the ``network_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, network_attachment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkAttachmentRequest): + request = compute.InsertNetworkAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_attachment_resource is not None: + request.network_attachment_resource = network_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertNetworkAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_attachment_resource: Optional[compute.NetworkAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a NetworkAttachment in the specified project + in the given scope using the parameters that are + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkAttachmentRequest, dict]): + The request object. A request message for + NetworkAttachments.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ network_attachment_resource (google.cloud.compute_v1.types.NetworkAttachment): + The body resource for this request + This corresponds to the ``network_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_attachment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkAttachmentRequest): + request = compute.InsertNetworkAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_attachment_resource is not None: + request.network_attachment_resource = network_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListNetworkAttachmentsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists the NetworkAttachments for a project in the + given scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkAttachmentsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNetworkAttachmentsRequest, dict]): + The request object. A request message for + NetworkAttachments.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.network_attachments.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNetworkAttachmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNetworkAttachmentsRequest): + request = compute.ListNetworkAttachmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyNetworkAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[compute.RegionSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyNetworkAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyNetworkAttachmentRequest, dict]): + The request object. A request message for + NetworkAttachments.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyNetworkAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyNetworkAttachmentRequest): + request = compute.SetIamPolicyNetworkAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = region_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsNetworkAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNetworkAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsNetworkAttachmentRequest, dict]): + The request object. A request message for + NetworkAttachments.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsNetworkAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsNetworkAttachmentRequest): + request = compute.TestIamPermissionsNetworkAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "NetworkAttachmentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "NetworkAttachmentsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/pagers.py new file mode 100644 index 000000000..d298a44ed --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkAttachmentAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkAttachmentAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NetworkAttachmentAggregatedList], + request: compute.AggregatedListNetworkAttachmentsRequest, + response: compute.NetworkAttachmentAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListNetworkAttachmentsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkAttachmentAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListNetworkAttachmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkAttachmentAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.NetworkAttachmentsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.NetworkAttachmentsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkAttachmentList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkAttachmentList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NetworkAttachmentList], + request: compute.ListNetworkAttachmentsRequest, + response: compute.NetworkAttachmentList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListNetworkAttachmentsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkAttachmentList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListNetworkAttachmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkAttachmentList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NetworkAttachment]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/__init__.py new file mode 100644 index 000000000..166a87664 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import NetworkAttachmentsTransport +from .rest import NetworkAttachmentsRestTransport +from .rest import NetworkAttachmentsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[NetworkAttachmentsTransport]] +_transport_registry['rest'] = NetworkAttachmentsRestTransport + +__all__ = ( + 'NetworkAttachmentsTransport', + 'NetworkAttachmentsRestTransport', + 'NetworkAttachmentsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/base.py new file mode 100644 index 000000000..61db428c8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/base.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class NetworkAttachmentsTransport(abc.ABC): + """Abstract transport class for NetworkAttachments.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNetworkAttachmentsRequest], + Union[ + compute.NetworkAttachmentAggregatedList, + Awaitable[compute.NetworkAttachmentAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteNetworkAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetNetworkAttachmentRequest], + Union[ + compute.NetworkAttachment, + Awaitable[compute.NetworkAttachment] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyNetworkAttachmentRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertNetworkAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListNetworkAttachmentsRequest], + Union[ + compute.NetworkAttachmentList, + Awaitable[compute.NetworkAttachmentList] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyNetworkAttachmentRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsNetworkAttachmentRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = 
region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'NetworkAttachmentsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/rest.py new file mode 100644 index 000000000..88c85fc82 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_attachments/transports/rest.py @@ -0,0 +1,1208 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import NetworkAttachmentsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class NetworkAttachmentsRestInterceptor: + """Interceptor for NetworkAttachments. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the NetworkAttachmentsRestTransport. + + .. 
code-block:: python + class MyCustomNetworkAttachmentsInterceptor(NetworkAttachmentsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
NetworkAttachmentsRestTransport(interceptor=MyCustomNetworkAttachmentsInterceptor()) + client = NetworkAttachmentsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListNetworkAttachmentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListNetworkAttachmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.NetworkAttachmentAggregatedList) -> compute.NetworkAttachmentAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteNetworkAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteNetworkAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetNetworkAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNetworkAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. 
+ """ + return request, metadata + + def post_get(self, response: compute.NetworkAttachment) -> compute.NetworkAttachment: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyNetworkAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyNetworkAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertNetworkAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertNetworkAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListNetworkAttachmentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNetworkAttachmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. + """ + return request, metadata + + def post_list(self, response: compute.NetworkAttachmentList) -> compute.NetworkAttachmentList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyNetworkAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyNetworkAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsNetworkAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsNetworkAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkAttachments server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the NetworkAttachments server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class NetworkAttachmentsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: NetworkAttachmentsRestInterceptor + + +class NetworkAttachmentsRestTransport(NetworkAttachmentsTransport): + """REST backend transport for NetworkAttachments. + + The NetworkAttachments API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[NetworkAttachmentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or NetworkAttachmentsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(NetworkAttachmentsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListNetworkAttachmentsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkAttachmentAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListNetworkAttachmentsRequest): + The request object. A request message for + NetworkAttachments.AggregatedList. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NetworkAttachmentAggregatedList: + Contains a list of + NetworkAttachmentsScopedList. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/networkAttachments', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListNetworkAttachmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkAttachmentAggregatedList() + pb_resp = compute.NetworkAttachmentAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(NetworkAttachmentsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteNetworkAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteNetworkAttachmentRequest): + The request object. A request message for + NetworkAttachments.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteNetworkAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(NetworkAttachmentsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetNetworkAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkAttachment: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetNetworkAttachmentRequest): + The request object. A request message for + NetworkAttachments.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NetworkAttachment: + NetworkAttachments A network + attachment resource ... 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetNetworkAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            # Any HTTP error status is mapped to the matching
            # core_exceptions.GoogleAPICallError subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.NetworkAttachment()
            pb_resp = compute.NetworkAttachment.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp

    class _GetIamPolicy(NetworkAttachmentsRestStub):
        # Generated REST stub for NetworkAttachments.GetIamPolicy.
        def __hash__(self):
            return hash("GetIamPolicy")

        # Required query-parameter fields whose proto default values must be
        # sent explicitly; merged into the request when unset. Empty here.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetIamPolicyNetworkAttachmentRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Policy:
            r"""Call the get iam policy method over HTTP.

            Args:
                request (~.compute.GetIamPolicyNetworkAttachmentRequest):
                    The request object. A request message for
                    NetworkAttachments.GetIamPolicy. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                    NOTE(review): accepted for interface parity but not
                    applied by this generated handler.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Policy:
                    An Identity and Access Management (IAM) policy: a
                    collection of ``bindings``, each binding one or more
                    principals (``members``) to a single ``role``,
                    optionally gated by a ``condition`` expression. See the
                    IAM documentation for details and examples.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/regions/{region}/networkAttachments/{resource}/getIamPolicy',
            },
            ]
            request, metadata = self._interceptor.pre_get_iam_policy(request, metadata)
            pb_request = compute.GetIamPolicyNetworkAttachmentRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Policy()
            pb_resp = compute.Policy.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_iam_policy(resp)
            return resp

    class _Insert(NetworkAttachmentsRestStub):
        # Generated REST stub for NetworkAttachments.Insert (POST with body).
        def __hash__(self):
            return hash("Insert")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.InsertNetworkAttachmentRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the insert method over HTTP.

            Args:
                request (~.compute.InsertNetworkAttachmentRequest):
                    The request object. A request message for
                    NetworkAttachments.Insert. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                    NOTE(review): accepted for interface parity but not
                    applied by this generated handler.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: Global, Regional and
                    Zonal. An operation resource is used to manage
                    asynchronous API requests; see "Handling API responses"
                    in the Compute documentation.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/regions/{region}/networkAttachments',
                'body': 'network_attachment_resource',
            },
            ]
            request, metadata = self._interceptor.pre_insert(request, metadata)
            pb_request = compute.InsertNetworkAttachmentRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_insert(resp)
            return resp

    class _List(NetworkAttachmentsRestStub):
        # Generated REST stub for NetworkAttachments.List.
        def __hash__(self):
            return hash("List")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.ListNetworkAttachmentsRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.NetworkAttachmentList:
            r"""Call the list method over HTTP.

            Args:
                request (~.compute.ListNetworkAttachmentsRequest):
                    The request object. A request message for
                    NetworkAttachments.List. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                    NOTE(review): accepted for interface parity but not
                    applied by this generated handler.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.NetworkAttachmentList:

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/regions/{region}/networkAttachments',
            },
            ]
            request, metadata = self._interceptor.pre_list(request, metadata)
            pb_request = compute.ListNetworkAttachmentsRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.NetworkAttachmentList()
            pb_resp = compute.NetworkAttachmentList.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list(resp)
            return resp

    class _SetIamPolicy(NetworkAttachmentsRestStub):
        # Generated REST stub for NetworkAttachments.SetIamPolicy (POST with body).
        def __hash__(self):
            return hash("SetIamPolicy")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.SetIamPolicyNetworkAttachmentRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Policy:
            r"""Call the set iam policy method over HTTP.

            Args:
                request (~.compute.SetIamPolicyNetworkAttachmentRequest):
                    The request object. A request message for
                    NetworkAttachments.SetIamPolicy. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                    NOTE(review): accepted for interface parity but not
                    applied by this generated handler.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Policy:
                    An Identity and Access Management (IAM) policy: a
                    collection of ``bindings``, each binding one or more
                    principals (``members``) to a single ``role``,
                    optionally gated by a ``condition`` expression. See the
                    IAM documentation for details and examples.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/regions/{region}/networkAttachments/{resource}/setIamPolicy',
                'body': 'region_set_policy_request_resource',
            },
            ]
            request, metadata = self._interceptor.pre_set_iam_policy(request, metadata)
            pb_request = compute.SetIamPolicyNetworkAttachmentRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Policy()
            pb_resp = compute.Policy.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_set_iam_policy(resp)
            return resp

    class _TestIamPermissions(NetworkAttachmentsRestStub):
        # Generated REST stub for NetworkAttachments.TestIamPermissions (POST with body).
        def __hash__(self):
            return hash("TestIamPermissions")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.TestIamPermissionsNetworkAttachmentRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.TestPermissionsResponse:
            r"""Call the test iam permissions method over HTTP.

            Args:
                request (~.compute.TestIamPermissionsNetworkAttachmentRequest):
                    The request object. A request message for
                    NetworkAttachments.TestIamPermissions.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                    NOTE(review): accepted for interface parity but not
                    applied by this generated handler.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.TestPermissionsResponse:

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/regions/{region}/networkAttachments/{resource}/testIamPermissions',
                'body': 'test_permissions_request_resource',
            },
            ]
            request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata)
            pb_request = compute.TestIamPermissionsNetworkAttachmentRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.TestPermissionsResponse()
            pb_resp = compute.TestPermissionsResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_test_iam_permissions(resp)
            return resp

    # Public method accessors: each property returns the matching stub
    # instance, typed as a plain callable for the transport interface.
    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListNetworkAttachmentsRequest],
            compute.NetworkAttachmentAggregatedList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._AggregatedList(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def delete(self) -> Callable[
            [compute.DeleteNetworkAttachmentRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Delete(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def get(self) -> Callable[
            [compute.GetNetworkAttachmentRequest],
            compute.NetworkAttachment]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def get_iam_policy(self) -> Callable[
            [compute.GetIamPolicyNetworkAttachmentRequest],
            compute.Policy]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._GetIamPolicy(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def insert(self) -> Callable[
            [compute.InsertNetworkAttachmentRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Insert(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListNetworkAttachmentsRequest],
            compute.NetworkAttachmentList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def set_iam_policy(self) -> Callable[
            [compute.SetIamPolicyNetworkAttachmentRequest],
            compute.Policy]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._SetIamPolicy(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def test_iam_permissions(self) -> Callable[
            [compute.TestIamPermissionsNetworkAttachmentRequest],
            compute.TestPermissionsResponse]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._TestIamPermissions(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def kind(self) -> str:
        # Transport discriminator used by the client factory.
        return "rest"

    def close(self):
        # Release the underlying requests.Session.
        self._session.close()


__all__=(
    'NetworkAttachmentsRestTransport',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/__init__.py
new file mode 100644
index 000000000..60070b444
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/__init__.py
@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Package entry point: re-export the generated client as the package's
# sole public name.
from .client import NetworkEdgeSecurityServicesClient

__all__ = (
    'NetworkEdgeSecurityServicesClient',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/client.py
new file mode 100644
index 000000000..89f5cdb2c
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/client.py
@@ -0,0 +1,1462 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.network_edge_security_services import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import NetworkEdgeSecurityServicesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import NetworkEdgeSecurityServicesRestTransport + + +class NetworkEdgeSecurityServicesClientMeta(type): + """Metaclass for the NetworkEdgeSecurityServices client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[NetworkEdgeSecurityServicesTransport]] + _transport_registry["rest"] = NetworkEdgeSecurityServicesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[NetworkEdgeSecurityServicesTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class NetworkEdgeSecurityServicesClient(metaclass=NetworkEdgeSecurityServicesClientMeta): + """The NetworkEdgeSecurityServices API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NetworkEdgeSecurityServicesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NetworkEdgeSecurityServicesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NetworkEdgeSecurityServicesTransport: + """Returns the transport used by the client instance. + + Returns: + NetworkEdgeSecurityServicesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, NetworkEdgeSecurityServicesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the network edge security services client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, NetworkEdgeSecurityServicesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NetworkEdgeSecurityServicesTransport): + # transport is a NetworkEdgeSecurityServicesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListNetworkEdgeSecurityServicesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all NetworkEdgeSecurityService + resources available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNetworkEdgeSecurityServicesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListNetworkEdgeSecurityServicesRequest, dict]): + The request object. A request message for + NetworkEdgeSecurityServices.AggregatedList. + See the method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.network_edge_security_services.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListNetworkEdgeSecurityServicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListNetworkEdgeSecurityServicesRequest): + request = compute.AggregatedListNetworkEdgeSecurityServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteNetworkEdgeSecurityServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_edge_security_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified service. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkEdgeSecurityServiceRequest( + network_edge_security_service="network_edge_security_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNetworkEdgeSecurityServiceRequest, dict]): + The request object. A request message for + NetworkEdgeSecurityServices.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_edge_security_service (str): + Name of the network edge security + service to delete. + + This corresponds to the ``network_edge_security_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_edge_security_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNetworkEdgeSecurityServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNetworkEdgeSecurityServiceRequest): + request = compute.DeleteNetworkEdgeSecurityServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_edge_security_service is not None: + request.network_edge_security_service = network_edge_security_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("network_edge_security_service", request.network_edge_security_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete(self, + request: Optional[Union[compute.DeleteNetworkEdgeSecurityServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_edge_security_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkEdgeSecurityServiceRequest( + network_edge_security_service="network_edge_security_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNetworkEdgeSecurityServiceRequest, dict]): + The request object. A request message for + NetworkEdgeSecurityServices.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ network_edge_security_service (str): + Name of the network edge security + service to delete. + + This corresponds to the ``network_edge_security_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_edge_security_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNetworkEdgeSecurityServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNetworkEdgeSecurityServiceRequest): + request = compute.DeleteNetworkEdgeSecurityServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_edge_security_service is not None: + request.network_edge_security_service = network_edge_security_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("network_edge_security_service", request.network_edge_security_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetNetworkEdgeSecurityServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_edge_security_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NetworkEdgeSecurityService: + r"""Gets a specified NetworkEdgeSecurityService. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkEdgeSecurityServiceRequest( + network_edge_security_service="network_edge_security_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNetworkEdgeSecurityServiceRequest, dict]): + The request object. A request message for + NetworkEdgeSecurityServices.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ network_edge_security_service (str): + Name of the network edge security + service to get. + + This corresponds to the ``network_edge_security_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NetworkEdgeSecurityService: + Represents a Google Cloud Armor + network edge security service resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_edge_security_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNetworkEdgeSecurityServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetNetworkEdgeSecurityServiceRequest): + request = compute.GetNetworkEdgeSecurityServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_edge_security_service is not None: + request.network_edge_security_service = network_edge_security_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("network_edge_security_service", request.network_edge_security_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertNetworkEdgeSecurityServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_edge_security_service_resource: Optional[compute.NetworkEdgeSecurityService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new service in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkEdgeSecurityServiceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkEdgeSecurityServiceRequest, dict]): + The request object. A request message for + NetworkEdgeSecurityServices.Insert. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_edge_security_service_resource (google.cloud.compute_v1.types.NetworkEdgeSecurityService): + The body resource for this request + This corresponds to the ``network_edge_security_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_edge_security_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkEdgeSecurityServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkEdgeSecurityServiceRequest): + request = compute.InsertNetworkEdgeSecurityServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_edge_security_service_resource is not None: + request.network_edge_security_service_resource = network_edge_security_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertNetworkEdgeSecurityServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_edge_security_service_resource: Optional[compute.NetworkEdgeSecurityService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new service in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkEdgeSecurityServiceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkEdgeSecurityServiceRequest, dict]): + The request object. A request message for + NetworkEdgeSecurityServices.Insert. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_edge_security_service_resource (google.cloud.compute_v1.types.NetworkEdgeSecurityService): + The body resource for this request + This corresponds to the ``network_edge_security_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_edge_security_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkEdgeSecurityServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkEdgeSecurityServiceRequest): + request = compute.InsertNetworkEdgeSecurityServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if network_edge_security_service_resource is not None: + request.network_edge_security_service_resource = network_edge_security_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
        return response

    def patch_unary(self,
            request: Optional[Union[compute.PatchNetworkEdgeSecurityServiceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            network_edge_security_service: Optional[str] = None,
            network_edge_security_service_resource: Optional[compute.NetworkEdgeSecurityService] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Patches the specified policy with the data included
        in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.NetworkEdgeSecurityServicesClient()

                # Initialize request argument(s)
                request = compute_v1.PatchNetworkEdgeSecurityServiceRequest(
                    network_edge_security_service="network_edge_security_service_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchNetworkEdgeSecurityServiceRequest, dict]):
                The request object. A request message for
                NetworkEdgeSecurityServices.Patch. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            network_edge_security_service (str):
                Name of the network edge security
                service to update.

                This corresponds to the ``network_edge_security_service`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            network_edge_security_service_resource (google.cloud.compute_v1.types.NetworkEdgeSecurityService):
                The body resource for this request
                This corresponds to the ``network_edge_security_service_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, network_edge_security_service, network_edge_security_service_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchNetworkEdgeSecurityServiceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchNetworkEdgeSecurityServiceRequest):
            request = compute.PatchNetworkEdgeSecurityServiceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if network_edge_security_service is not None:
            request.network_edge_security_service = network_edge_security_service
        if network_edge_security_service_resource is not None:
            request.network_edge_security_service_resource = network_edge_security_service_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("network_edge_security_service", request.network_edge_security_service),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        # NOTE: unlike patch(), the *_unary variant returns the raw
        # compute.Operation without wrapping it for result polling.
        return response

    def patch(self,
            request: Optional[Union[compute.PatchNetworkEdgeSecurityServiceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            network_edge_security_service: Optional[str] = None,
            network_edge_security_service_resource: Optional[compute.NetworkEdgeSecurityService] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Patches the specified policy with the data included
        in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.NetworkEdgeSecurityServicesClient()

                # Initialize request argument(s)
                request = compute_v1.PatchNetworkEdgeSecurityServiceRequest(
                    network_edge_security_service="network_edge_security_service_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchNetworkEdgeSecurityServiceRequest, dict]):
                The request object. A request message for
                NetworkEdgeSecurityServices.Patch. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            network_edge_security_service (str):
                Name of the network edge security
                service to update.

                This corresponds to the ``network_edge_security_service`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            network_edge_security_service_resource (google.cloud.compute_v1.types.NetworkEdgeSecurityService):
                The body resource for this request
                This corresponds to the ``network_edge_security_service_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, network_edge_security_service, network_edge_security_service_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchNetworkEdgeSecurityServiceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchNetworkEdgeSecurityServiceRequest):
            request = compute.PatchNetworkEdgeSecurityServiceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if network_edge_security_service is not None:
            request.network_edge_security_service = network_edge_security_service
        if network_edge_security_service_resource is not None:
            request.network_edge_security_service_resource = network_edge_security_service_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("network_edge_security_service", request.network_edge_security_service),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a poller for the returned operation so the caller can wait
        # on the extended (Compute-style) LRO via the region operations service.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def __enter__(self) -> "NetworkEdgeSecurityServicesClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "NetworkEdgeSecurityServicesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/pagers.py new file mode 100644 index 000000000..1e5922d9b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/pagers.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkEdgeSecurityServiceAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkEdgeSecurityServiceAggregatedList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NetworkEdgeSecurityServiceAggregatedList], + request: compute.AggregatedListNetworkEdgeSecurityServicesRequest, + response: compute.NetworkEdgeSecurityServiceAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListNetworkEdgeSecurityServicesRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkEdgeSecurityServiceAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListNetworkEdgeSecurityServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkEdgeSecurityServiceAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.NetworkEdgeSecurityServicesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.NetworkEdgeSecurityServicesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/__init__.py 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/__init__.py
new file mode 100644
index 000000000..31aaeeb7d
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/__init__.py
@@ -0,0 +1,32 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import NetworkEdgeSecurityServicesTransport
from .rest import NetworkEdgeSecurityServicesRestTransport
from .rest import NetworkEdgeSecurityServicesRestInterceptor


# Compile a registry of transports. REST is the only transport generated
# for this service (the Compute API does not ship a gRPC transport here).
_transport_registry = OrderedDict()  # type: Dict[str, Type[NetworkEdgeSecurityServicesTransport]]
_transport_registry['rest'] = NetworkEdgeSecurityServicesRestTransport

__all__ = (
    'NetworkEdgeSecurityServicesTransport',
    'NetworkEdgeSecurityServicesRestTransport',
    'NetworkEdgeSecurityServicesRestInterceptor',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/base.py
new file mode 100644
index 000000000..d842537d5
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/base.py
@@ -0,0 +1,219 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class NetworkEdgeSecurityServicesTransport(abc.ABC): + """Abstract transport class for NetworkEdgeSecurityServices.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNetworkEdgeSecurityServicesRequest], + Union[ + compute.NetworkEdgeSecurityServiceAggregatedList, + Awaitable[compute.NetworkEdgeSecurityServiceAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteNetworkEdgeSecurityServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetNetworkEdgeSecurityServiceRequest], + Union[ + compute.NetworkEdgeSecurityService, + Awaitable[compute.NetworkEdgeSecurityService] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertNetworkEdgeSecurityServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchNetworkEdgeSecurityServiceRequest], + 
Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'NetworkEdgeSecurityServicesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py new file mode 100644 index 000000000..e0eb3ddd4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py @@ -0,0 +1,800 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#

from google.auth.transport.requests import AuthorizedSession  # type: ignore
import json  # type: ignore
import grpc  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
from google.api_core import path_template
from google.api_core import gapic_v1

from google.protobuf import json_format
from requests import __version__ as requests_version
import dataclasses
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


from google.cloud.compute_v1.types import compute

from .base import NetworkEdgeSecurityServicesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)


class NetworkEdgeSecurityServicesRestInterceptor:
    """Interceptor for NetworkEdgeSecurityServices.

    Interceptors are used to manipulate requests, request metadata, and responses
    in arbitrary ways.
    Example use cases include:
    * Logging
    * Verifying requests according to service or custom semantics
    * Stripping extraneous information from responses

    These use cases and more can be enabled by injecting an
    instance of a custom subclass when constructing the NetworkEdgeSecurityServicesRestTransport.

    .. code-block:: python
        class MyCustomNetworkEdgeSecurityServicesInterceptor(NetworkEdgeSecurityServicesRestInterceptor):
            def pre_aggregated_list(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_aggregated_list(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_delete(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_delete(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_get(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_get(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_insert(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_insert(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_patch(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_patch(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = NetworkEdgeSecurityServicesRestTransport(interceptor=MyCustomNetworkEdgeSecurityServicesInterceptor())
        client = NetworkEdgeSecurityServicesClient(transport=transport)


    """
    def pre_aggregated_list(self, request: compute.AggregatedListNetworkEdgeSecurityServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListNetworkEdgeSecurityServicesRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for aggregated_list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkEdgeSecurityServices server.
        """
        return request, metadata

    def post_aggregated_list(self, response: compute.NetworkEdgeSecurityServiceAggregatedList) -> compute.NetworkEdgeSecurityServiceAggregatedList:
        """Post-rpc interceptor for aggregated_list

        Override in a subclass to manipulate the response
        after it is returned by the NetworkEdgeSecurityServices server but before
        it is returned to user code.
        """
        return response
    def pre_delete(self, request: compute.DeleteNetworkEdgeSecurityServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteNetworkEdgeSecurityServiceRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for delete

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkEdgeSecurityServices server.
        """
        return request, metadata

    def post_delete(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for delete

        Override in a subclass to manipulate the response
        after it is returned by the NetworkEdgeSecurityServices server but before
        it is returned to user code.
        """
        return response
    def pre_get(self, request: compute.GetNetworkEdgeSecurityServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNetworkEdgeSecurityServiceRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkEdgeSecurityServices server.
        """
        return request, metadata

    def post_get(self, response: compute.NetworkEdgeSecurityService) -> compute.NetworkEdgeSecurityService:
        """Post-rpc interceptor for get

        Override in a subclass to manipulate the response
        after it is returned by the NetworkEdgeSecurityServices server but before
        it is returned to user code.
        """
        return response
    def pre_insert(self, request: compute.InsertNetworkEdgeSecurityServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertNetworkEdgeSecurityServiceRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for insert

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkEdgeSecurityServices server.
        """
        return request, metadata

    def post_insert(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for insert

        Override in a subclass to manipulate the response
        after it is returned by the NetworkEdgeSecurityServices server but before
        it is returned to user code.
        """
        return response
    def pre_patch(self, request: compute.PatchNetworkEdgeSecurityServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchNetworkEdgeSecurityServiceRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for patch

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkEdgeSecurityServices server.
        """
        return request, metadata

    def post_patch(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for patch

        Override in a subclass to manipulate the response
        after it is returned by the NetworkEdgeSecurityServices server but before
        it is returned to user code.
        """
        return response


@dataclasses.dataclass
class NetworkEdgeSecurityServicesRestStub:
    # Shared state handed to each per-RPC stub class by the transport.
    _session: AuthorizedSession
    _host: str
    _interceptor: NetworkEdgeSecurityServicesRestInterceptor


class NetworkEdgeSecurityServicesRestTransport(NetworkEdgeSecurityServicesTransport):
    """REST backend transport for NetworkEdgeSecurityServices.

    The NetworkEdgeSecurityServices API.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[NetworkEdgeSecurityServicesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or NetworkEdgeSecurityServicesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(NetworkEdgeSecurityServicesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListNetworkEdgeSecurityServicesRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEdgeSecurityServiceAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListNetworkEdgeSecurityServicesRequest): + The request object. A request message for + NetworkEdgeSecurityServices.AggregatedList. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NetworkEdgeSecurityServiceAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/networkEdgeSecurityServices', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListNetworkEdgeSecurityServicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEdgeSecurityServiceAggregatedList() + pb_resp = compute.NetworkEdgeSecurityServiceAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(NetworkEdgeSecurityServicesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteNetworkEdgeSecurityServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteNetworkEdgeSecurityServiceRequest): + The request object. A request message for + NetworkEdgeSecurityServices.Delete. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices/{network_edge_security_service}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteNetworkEdgeSecurityServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(NetworkEdgeSecurityServicesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetNetworkEdgeSecurityServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEdgeSecurityService: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetNetworkEdgeSecurityServiceRequest): + The request object. A request message for + NetworkEdgeSecurityServices.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NetworkEdgeSecurityService: + Represents a Google Cloud Armor + network edge security service resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices/{network_edge_security_service}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetNetworkEdgeSecurityServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEdgeSecurityService() + pb_resp = compute.NetworkEdgeSecurityService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(NetworkEdgeSecurityServicesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertNetworkEdgeSecurityServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertNetworkEdgeSecurityServiceRequest): + The request object. A request message for + NetworkEdgeSecurityServices.Insert. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices', + 'body': 'network_edge_security_service_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertNetworkEdgeSecurityServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _Patch(NetworkEdgeSecurityServicesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchNetworkEdgeSecurityServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchNetworkEdgeSecurityServiceRequest): + The request object. A request message for + NetworkEdgeSecurityServices.Patch. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices/{network_edge_security_service}', + 'body': 'network_edge_security_service_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchNetworkEdgeSecurityServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNetworkEdgeSecurityServicesRequest], + compute.NetworkEdgeSecurityServiceAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteNetworkEdgeSecurityServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetNetworkEdgeSecurityServiceRequest], + compute.NetworkEdgeSecurityService]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertNetworkEdgeSecurityServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchNetworkEdgeSecurityServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'NetworkEdgeSecurityServicesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/__init__.py new file mode 100644 index 000000000..cbe5aa956 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import NetworkEndpointGroupsClient + +__all__ = ( + 'NetworkEndpointGroupsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/client.py new file mode 100644 index 000000000..41bb74a4f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/client.py @@ -0,0 +1,2192 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.network_endpoint_groups import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import NetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import NetworkEndpointGroupsRestTransport + + +class NetworkEndpointGroupsClientMeta(type): + """Metaclass for the NetworkEndpointGroups client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[NetworkEndpointGroupsTransport]] + _transport_registry["rest"] = NetworkEndpointGroupsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[NetworkEndpointGroupsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class NetworkEndpointGroupsClient(metaclass=NetworkEndpointGroupsClientMeta): + """The NetworkEndpointGroups API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NetworkEndpointGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NetworkEndpointGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NetworkEndpointGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + NetworkEndpointGroupsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, NetworkEndpointGroupsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the network endpoint groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, NetworkEndpointGroupsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NetworkEndpointGroupsTransport): + # transport is a NetworkEndpointGroupsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListNetworkEndpointGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of network endpoint groups and + sorts them by zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNetworkEndpointGroupsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListNetworkEndpointGroupsRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.AggregatedList. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.network_endpoint_groups.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListNetworkEndpointGroupsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListNetworkEndpointGroupsRequest): + request = compute.AggregatedListNetworkEndpointGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def attach_network_endpoints_unary(self, + request: Optional[Union[compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + network_endpoint_groups_attach_endpoints_request_resource: Optional[compute.NetworkEndpointGroupsAttachEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Attach a list of network endpoints to the specified + network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AttachNetworkEndpointsNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.AttachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are attaching network + endpoints to. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + This corresponds to the ``network_endpoint_groups_attach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, network_endpoint_group, network_endpoint_groups_attach_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AttachNetworkEndpointsNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AttachNetworkEndpointsNetworkEndpointGroupRequest): + request = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if network_endpoint_groups_attach_endpoints_request_resource is not None: + request.network_endpoint_groups_attach_endpoints_request_resource = network_endpoint_groups_attach_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.attach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def attach_network_endpoints(self, + request: Optional[Union[compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + network_endpoint_groups_attach_endpoints_request_resource: Optional[compute.NetworkEndpointGroupsAttachEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Attach a list of network endpoints to the specified + network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AttachNetworkEndpointsNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.AttachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are attaching network + endpoints to. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + This corresponds to the ``network_endpoint_groups_attach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, network_endpoint_group, network_endpoint_groups_attach_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AttachNetworkEndpointsNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AttachNetworkEndpointsNetworkEndpointGroupRequest): + request = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if network_endpoint_groups_attach_endpoints_request_resource is not None: + request.network_endpoint_groups_attach_endpoints_request_resource = network_endpoint_groups_attach_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.attach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified network endpoint group. The + network endpoints in the NEG and the VM instances they + belong to are not terminated when the NEG is deleted. + Note that the NEG cannot be deleted if there are backend + services referencing it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group to delete. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNetworkEndpointGroupRequest): + request = compute.DeleteNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete(self, + request: Optional[Union[compute.DeleteNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified network endpoint group. The + network endpoints in the NEG and the VM instances they + belong to are not terminated when the NEG is deleted. + Note that the NEG cannot be deleted if there are backend + services referencing it. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group to delete. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNetworkEndpointGroupRequest): + request = compute.DeleteNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def detach_network_endpoints_unary(self, + request: Optional[Union[compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + network_endpoint_groups_detach_endpoints_request_resource: Optional[compute.NetworkEndpointGroupsDetachEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Detach a list of network endpoints from the specified + network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_detach_network_endpoints(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DetachNetworkEndpointsNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.detach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DetachNetworkEndpointsNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.DetachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are removing network + endpoints. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + This corresponds to the ``network_endpoint_groups_detach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, network_endpoint_group, network_endpoint_groups_detach_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DetachNetworkEndpointsNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DetachNetworkEndpointsNetworkEndpointGroupRequest): + request = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if network_endpoint_groups_detach_endpoints_request_resource is not None: + request.network_endpoint_groups_detach_endpoints_request_resource = network_endpoint_groups_detach_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def detach_network_endpoints(self, + request: Optional[Union[compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + network_endpoint_groups_detach_endpoints_request_resource: Optional[compute.NetworkEndpointGroupsDetachEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Detach a list of network endpoints from the specified + network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_detach_network_endpoints(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DetachNetworkEndpointsNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.detach_network_endpoints(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DetachNetworkEndpointsNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.DetachNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group where you are removing network + endpoints. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + This corresponds to the ``network_endpoint_groups_detach_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, network_endpoint_group, network_endpoint_groups_detach_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DetachNetworkEndpointsNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DetachNetworkEndpointsNetworkEndpointGroupRequest): + request = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if network_endpoint_groups_detach_endpoints_request_resource is not None: + request.network_endpoint_groups_detach_endpoints_request_resource = network_endpoint_groups_detach_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NetworkEndpointGroup: + r"""Returns the specified network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group. It should comply with RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NetworkEndpointGroup: + Represents a collection of network + endpoints. 
A network endpoint group + (NEG) defines how a set of endpoints + should be reached, whether they are + reachable, and where they are located. + For more information about using NEGs, + see Setting up external HTTP(S) Load + Balancing with internet NEGs, Setting up + zonal NEGs, or Setting up external + HTTP(S) Load Balancing with serverless + NEGs. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetNetworkEndpointGroupRequest): + request = compute.GetNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group_resource: Optional[compute.NetworkEndpointGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a network endpoint group in the specified + project using the parameters that are included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkEndpointGroupRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + The name of the zone where you want + to create the network endpoint group. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup): + The body resource for this request + This corresponds to the ``network_endpoint_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, network_endpoint_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkEndpointGroupRequest): + request = compute.InsertNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group_resource is not None: + request.network_endpoint_group_resource = network_endpoint_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group_resource: Optional[compute.NetworkEndpointGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a network endpoint group in the specified + project using the parameters that are included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkEndpointGroupRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where you want + to create the network endpoint group. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup): + The body resource for this request + This corresponds to the ``network_endpoint_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, network_endpoint_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkEndpointGroupRequest): + request = compute.InsertNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group_resource is not None: + request.network_endpoint_group_resource = network_endpoint_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListNetworkEndpointGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of network endpoint groups that + are located in the specified project and zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkEndpointGroupsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNetworkEndpointGroupsRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.network_endpoint_groups.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNetworkEndpointGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNetworkEndpointGroupsRequest): + request = compute.ListNetworkEndpointGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_network_endpoints(self, + request: Optional[Union[compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + network_endpoint_groups_list_endpoints_request_resource: Optional[compute.NetworkEndpointGroupsListEndpointsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworkEndpointsPager: + r"""Lists the network endpoints in the specified network + endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_network_endpoints(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkEndpointsNetworkEndpointGroupsRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_network_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNetworkEndpointsNetworkEndpointGroupsRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.ListNetworkEndpoints. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group from which you want to generate a + list of included network endpoints. It + should comply with RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_groups_list_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsListEndpointsRequest): + The body resource for this request + This corresponds to the ``network_endpoint_groups_list_endpoints_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.network_endpoint_groups.pagers.ListNetworkEndpointsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, network_endpoint_group, network_endpoint_groups_list_endpoints_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNetworkEndpointsNetworkEndpointGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNetworkEndpointsNetworkEndpointGroupsRequest): + request = compute.ListNetworkEndpointsNetworkEndpointGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + if network_endpoint_groups_list_endpoints_request_resource is not None: + request.network_endpoint_groups_list_endpoints_request_resource = network_endpoint_groups_list_endpoints_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_network_endpoints] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListNetworkEndpointsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNetworkEndpointGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsNetworkEndpointGroupRequest, dict]): + The request object. A request message for + NetworkEndpointGroups.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.TestIamPermissionsNetworkEndpointGroupRequest): + request = compute.TestIamPermissionsNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "NetworkEndpointGroupsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "NetworkEndpointGroupsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/pagers.py new file mode 100644 index 000000000..9cc099a89 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/pagers.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkEndpointGroupAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkEndpointGroupAggregatedList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NetworkEndpointGroupAggregatedList], + request: compute.AggregatedListNetworkEndpointGroupsRequest, + response: compute.NetworkEndpointGroupAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListNetworkEndpointGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkEndpointGroupAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListNetworkEndpointGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkEndpointGroupAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.NetworkEndpointGroupsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.NetworkEndpointGroupsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkEndpointGroupList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkEndpointGroupList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NetworkEndpointGroupList], + request: compute.ListNetworkEndpointGroupsRequest, + response: compute.NetworkEndpointGroupList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListNetworkEndpointGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkEndpointGroupList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListNetworkEndpointGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkEndpointGroupList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NetworkEndpointGroup]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListNetworkEndpointsPager: + """A pager for iterating through ``list_network_endpoints`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNetworkEndpoints`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NetworkEndpointGroupsListNetworkEndpoints], + request: compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, + response: compute.NetworkEndpointGroupsListNetworkEndpoints, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListNetworkEndpointsNetworkEndpointGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkEndpointGroupsListNetworkEndpoints): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListNetworkEndpointsNetworkEndpointGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkEndpointGroupsListNetworkEndpoints]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NetworkEndpointWithHealthStatus]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/__init__.py new file mode 100644 index 000000000..6fb62a6da --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import NetworkEndpointGroupsTransport +from .rest import NetworkEndpointGroupsRestTransport +from .rest import NetworkEndpointGroupsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[NetworkEndpointGroupsTransport]] +_transport_registry['rest'] = NetworkEndpointGroupsRestTransport + +__all__ = ( + 'NetworkEndpointGroupsTransport', + 'NetworkEndpointGroupsRestTransport', + 'NetworkEndpointGroupsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/base.py new file mode 100644 index 000000000..cd9f16d8a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import zone_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class NetworkEndpointGroupsTransport(abc.ABC): + """Abstract transport class for NetworkEndpointGroups.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.attach_network_endpoints: gapic_v1.method.wrap_method( + self.attach_network_endpoints, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.detach_network_endpoints: gapic_v1.method.wrap_method( + self.detach_network_endpoints, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_network_endpoints: gapic_v1.method.wrap_method( + self.list_network_endpoints, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNetworkEndpointGroupsRequest], + Union[ + compute.NetworkEndpointGroupAggregatedList, + Awaitable[compute.NetworkEndpointGroupAggregatedList] + ]]: + raise NotImplementedError() + + @property + def attach_network_endpoints(self) -> Callable[ + [compute.AttachNetworkEndpointsNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def detach_network_endpoints(self) -> Callable[ + [compute.DetachNetworkEndpointsNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetNetworkEndpointGroupRequest], + Union[ + compute.NetworkEndpointGroup, + Awaitable[compute.NetworkEndpointGroup] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListNetworkEndpointGroupsRequest], + Union[ + compute.NetworkEndpointGroupList, + Awaitable[compute.NetworkEndpointGroupList] + ]]: + raise NotImplementedError() + + @property + def list_network_endpoints(self) -> Callable[ + [compute.ListNetworkEndpointsNetworkEndpointGroupsRequest], + Union[ + compute.NetworkEndpointGroupsListNetworkEndpoints, + Awaitable[compute.NetworkEndpointGroupsListNetworkEndpoints] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsNetworkEndpointGroupRequest], + Union[ + compute.TestPermissionsResponse, + 
Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient: + ex_op_service = self._extended_operations_services.get("zone_operations") + if not ex_op_service: + ex_op_service = zone_operations.ZoneOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["zone_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'NetworkEndpointGroupsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py new file mode 100644 index 000000000..adbe4f21c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py @@ -0,0 +1,1291 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import NetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class NetworkEndpointGroupsRestInterceptor: + """Interceptor for NetworkEndpointGroups. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the NetworkEndpointGroupsRestTransport. + + .. 
code-block:: python + class MyCustomNetworkEndpointGroupsInterceptor(NetworkEndpointGroupsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_attach_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_attach_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_detach_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_detach_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_network_endpoints(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_network_endpoints(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = NetworkEndpointGroupsRestTransport(interceptor=MyCustomNetworkEndpointGroupsInterceptor()) + client = NetworkEndpointGroupsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListNetworkEndpointGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListNetworkEndpointGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.NetworkEndpointGroupAggregatedList) -> compute.NetworkEndpointGroupAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_attach_network_endpoints(self, request: compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for attach_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. + """ + return request, metadata + + def post_attach_network_endpoints(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for attach_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. 
+ """ + return response + def pre_delete(self, request: compute.DeleteNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_detach_network_endpoints(self, request: compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for detach_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. + """ + return request, metadata + + def post_detach_network_endpoints(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for detach_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. 
+ """ + return request, metadata + + def post_get(self, response: compute.NetworkEndpointGroup) -> compute.NetworkEndpointGroup: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListNetworkEndpointGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNetworkEndpointGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. + """ + return request, metadata + + def post_list(self, response: compute.NetworkEndpointGroupList) -> compute.NetworkEndpointGroupList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. 
+ """ + return response + def pre_list_network_endpoints(self, request: compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_network_endpoints + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. + """ + return request, metadata + + def post_list_network_endpoints(self, response: compute.NetworkEndpointGroupsListNetworkEndpoints) -> compute.NetworkEndpointGroupsListNetworkEndpoints: + """Post-rpc interceptor for list_network_endpoints + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkEndpointGroups server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the NetworkEndpointGroups server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class NetworkEndpointGroupsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: NetworkEndpointGroupsRestInterceptor + + +class NetworkEndpointGroupsRestTransport(NetworkEndpointGroupsTransport): + """REST backend transport for NetworkEndpointGroups. + + The NetworkEndpointGroups API. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[NetworkEndpointGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or NetworkEndpointGroupsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(NetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + 
        def __call__(self,
                request: compute.AggregatedListNetworkEndpointGroupsRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.NetworkEndpointGroupAggregatedList:
            r"""Call the aggregated list method over HTTP.

            Args:
                request (~.compute.AggregatedListNetworkEndpointGroupsRequest):
                    The request object. A request message for
                    NetworkEndpointGroups.AggregatedList.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.NetworkEndpointGroupAggregatedList:

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/aggregated/networkEndpointGroups',
            },
            ]
            request, metadata = self._interceptor.pre_aggregated_list(request, metadata)
            pb_request = compute.AggregatedListNetworkEndpointGroupsRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.NetworkEndpointGroupAggregatedList()
            pb_resp = compute.NetworkEndpointGroupAggregatedList.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_aggregated_list(resp)
            return resp

    # REST stub for the AttachNetworkEndpoints RPC (POST with a JSON body).
    class _AttachNetworkEndpoints(NetworkEndpointGroupsRestStub):
        def __hash__(self):
            return hash("AttachNetworkEndpoints")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the attach network endpoints method over HTTP.

            Args:
                request (~.compute.AttachNetworkEndpointsNetworkEndpointGroupRequest):
                    The request object. A request message for
                    NetworkEndpointGroups.AttachNetworkEndpoints.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints',
                'body': 'network_endpoint_groups_attach_endpoints_request_resource',
            },
            ]
            request, metadata = self._interceptor.pre_attach_network_endpoints(request, metadata)
            pb_request = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_attach_network_endpoints(resp)
            return resp

    # REST stub for the Delete RPC.
    class _Delete(NetworkEndpointGroupsRestStub):
        def __hash__(self):
            return hash("Delete")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.DeleteNetworkEndpointGroupRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the delete method over HTTP.

            Args:
                request (~.compute.DeleteNetworkEndpointGroupRequest):
                    The request object. A request message for
                    NetworkEndpointGroups.Delete. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'delete',
                'uri': '/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}',
            },
            ]
            request, metadata = self._interceptor.pre_delete(request, metadata)
            pb_request = compute.DeleteNetworkEndpointGroupRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_delete(resp)
            return resp

    # REST stub for the DetachNetworkEndpoints RPC (POST with a JSON body).
    class _DetachNetworkEndpoints(NetworkEndpointGroupsRestStub):
        def __hash__(self):
            return hash("DetachNetworkEndpoints")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the detach network endpoints method over HTTP.

            Args:
                request (~.compute.DetachNetworkEndpointsNetworkEndpointGroupRequest):
                    The request object. A request message for
                    NetworkEndpointGroups.DetachNetworkEndpoints.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints',
                'body': 'network_endpoint_groups_detach_endpoints_request_resource',
            },
            ]
            request, metadata = self._interceptor.pre_detach_network_endpoints(request, metadata)
            pb_request = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_detach_network_endpoints(resp)
            return resp

    # REST stub for the Get RPC.
    class _Get(NetworkEndpointGroupsRestStub):
        def __hash__(self):
            return hash("Get")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetNetworkEndpointGroupRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.NetworkEndpointGroup:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetNetworkEndpointGroupRequest):
                    The request object. A request message for
                    NetworkEndpointGroups.Get. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.NetworkEndpointGroup:
                    Represents a collection of network
                    endpoints. A network endpoint group
                    (NEG) defines how a set of endpoints
                    should be reached, whether they are
                    reachable, and where they are located.
                    For more information about using NEGs,
                    see Setting up external HTTP(S) Load
                    Balancing with internet NEGs, Setting up
                    zonal NEGs, or Setting up external
                    HTTP(S) Load Balancing with serverless
                    NEGs.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}',
            },
            ]
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetNetworkEndpointGroupRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.NetworkEndpointGroup()
            pb_resp = compute.NetworkEndpointGroup.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp

    # REST stub for the Insert RPC (POST with a JSON body).
    class _Insert(NetworkEndpointGroupsRestStub):
        def __hash__(self):
            return hash("Insert")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.InsertNetworkEndpointGroupRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the insert method over HTTP.

            Args:
                request (~.compute.InsertNetworkEndpointGroupRequest):
                    The request object. A request message for
                    NetworkEndpointGroups.Insert. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups', + 'body': 'network_endpoint_group_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(NetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNetworkEndpointGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEndpointGroupList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListNetworkEndpointGroupsRequest): + The request object. A request message for + NetworkEndpointGroups.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NetworkEndpointGroupList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListNetworkEndpointGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEndpointGroupList() + pb_resp = compute.NetworkEndpointGroupList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListNetworkEndpoints(NetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("ListNetworkEndpoints") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: + r"""Call the list network endpoints method over HTTP. + + Args: + request (~.compute.ListNetworkEndpointsNetworkEndpointGroupsRequest): + The request object. A request message for + NetworkEndpointGroups.ListNetworkEndpoints. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NetworkEndpointGroupsListNetworkEndpoints: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints', + 'body': 'network_endpoint_groups_list_endpoints_request_resource', + }, + ] + request, metadata = self._interceptor.pre_list_network_endpoints(request, metadata) + pb_request = compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.NetworkEndpointGroupsListNetworkEndpoints()
            pb_resp = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list_network_endpoints(resp)
            return resp

    # REST stub for NetworkEndpointGroups.TestIamPermissions.
    class _TestIamPermissions(NetworkEndpointGroupsRestStub):
        def __hash__(self):
            return hash("TestIamPermissions")

        # Required request fields and their defaults, merged into the query
        # string when unset; empty for this method.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Only defaults for keys absent from the transcoded request.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.TestIamPermissionsNetworkEndpointGroupRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.TestPermissionsResponse:
            r"""Call the test iam permissions method over HTTP.

            Args:
                request (~.compute.TestIamPermissionsNetworkEndpointGroupRequest):
                    The request object. A request message for
                    NetworkEndpointGroups.TestIamPermissions.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.TestPermissionsResponse:

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{resource}/testIamPermissions',
                'body': 'test_permissions_request_resource',
            },
            ]
            request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata)
            pb_request = compute.TestIamPermissionsNetworkEndpointGroupRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.TestPermissionsResponse()
            pb_resp = compute.TestPermissionsResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_test_iam_permissions(resp)
            return resp

    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListNetworkEndpointGroupsRequest],
            compute.NetworkEndpointGroupAggregatedList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore

    @property
    def attach_network_endpoints(self) -> Callable[
            [compute.AttachNetworkEndpointsNetworkEndpointGroupRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._AttachNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore

    @property
    def delete(self) -> Callable[
            [compute.DeleteNetworkEndpointGroupRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Delete(self._session, self._host, self._interceptor) # type: ignore

    @property
    def detach_network_endpoints(self) -> Callable[
            [compute.DetachNetworkEndpointsNetworkEndpointGroupRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._DetachNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore

    @property
    def get(self) -> Callable[
            [compute.GetNetworkEndpointGroupRequest],
            compute.NetworkEndpointGroup]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor) # type: ignore

    @property
    def insert(self) -> Callable[
            [compute.InsertNetworkEndpointGroupRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Insert(self._session, self._host, self._interceptor) # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListNetworkEndpointGroupsRequest],
            compute.NetworkEndpointGroupList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor) # type: ignore

    @property
    def list_network_endpoints(self) -> Callable[
            [compute.ListNetworkEndpointsNetworkEndpointGroupsRequest],
            compute.NetworkEndpointGroupsListNetworkEndpoints]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._ListNetworkEndpoints(self._session, self._host, self._interceptor) # type: ignore

    @property
    def test_iam_permissions(self) -> Callable[
            [compute.TestIamPermissionsNetworkEndpointGroupRequest],
            compute.TestPermissionsResponse]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore

    @property
    def kind(self) -> str:
        # Transport flavor identifier consumed by the client factory.
        return "rest"

    def close(self):
        self._session.close()


__all__=(
    'NetworkEndpointGroupsRestTransport',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/__init__.py
new file mode 100644
index 000000000..82a9ae825
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/__init__.py
@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import NetworkFirewallPoliciesClient

__all__ = (
    'NetworkFirewallPoliciesClient',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/client.py
new file mode 100644
index 000000000..d4c8839ef
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/client.py
@@ -0,0 +1,3564 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import os
import re
from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast

from google.cloud.compute_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object] # type: ignore

from google.api_core import extended_operation # type: ignore
from google.cloud.compute_v1.services.network_firewall_policies import pagers
from google.cloud.compute_v1.types import compute
from .transports.base import NetworkFirewallPoliciesTransport, DEFAULT_CLIENT_INFO
from .transports.rest import NetworkFirewallPoliciesRestTransport


class NetworkFirewallPoliciesClientMeta(type):
    """Metaclass for the NetworkFirewallPolicies client.

    This provides class-level methods for building and retrieving
    support objects (e.g.
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[NetworkFirewallPoliciesTransport]] + _transport_registry["rest"] = NetworkFirewallPoliciesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[NetworkFirewallPoliciesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class NetworkFirewallPoliciesClient(metaclass=NetworkFirewallPoliciesClientMeta): + """The NetworkFirewallPolicies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NetworkFirewallPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NetworkFirewallPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NetworkFirewallPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + NetworkFirewallPoliciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, NetworkFirewallPoliciesTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the network firewall policies client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, NetworkFirewallPoliciesTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
                NOTE: "rest" transport functionality is currently in a
                beta state (preview). We welcome your feedback via an
                issue in this library's source repository.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, NetworkFirewallPoliciesTransport):
            # transport is a NetworkFirewallPoliciesTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    # Unary variant: returns the raw compute.Operation without wrapping it in
    # an ExtendedOperation (see add_association below for the wrapped variant).
    def add_association_unary(self,
            request: Optional[Union[compute.AddAssociationNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            firewall_policy_association_resource: Optional[compute.FirewallPolicyAssociation] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Inserts an association for the specified firewall
        policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_association():
                # Create a client
                client = compute_v1.NetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.AddAssociationNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                )

                # Make the request
                response = client.add_association(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddAssociationNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                NetworkFirewallPolicies.AddAssociation.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation):
                The body resource for this request
                This corresponds to the ``firewall_policy_association_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, firewall_policy, firewall_policy_association_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddAssociationNetworkFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddAssociationNetworkFirewallPolicyRequest):
            request = compute.AddAssociationNetworkFirewallPolicyRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if firewall_policy is not None:
            request.firewall_policy = firewall_policy
        if firewall_policy_association_resource is not None:
            request.firewall_policy_association_resource = firewall_policy_association_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_association]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def add_association(self,
            request: Optional[Union[compute.AddAssociationNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            firewall_policy_association_resource: Optional[compute.FirewallPolicyAssociation] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Inserts an association for the specified firewall
        policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_association():
                # Create a client
                client = compute_v1.NetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.AddAssociationNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                )

                # Make the request
                response = client.add_association(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddAssociationNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                NetworkFirewallPolicies.AddAssociation.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation):
                The body resource for this request
                This corresponds to the ``firewall_policy_association_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, firewall_policy, firewall_policy_association_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddAssociationNetworkFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddAssociationNetworkFirewallPolicyRequest):
            request = compute.AddAssociationNetworkFirewallPolicyRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if firewall_policy is not None:
            request.firewall_policy = firewall_policy
        if firewall_policy_association_resource is not None:
            request.firewall_policy_association_resource = firewall_policy_association_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_association]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    # Unary variant: returns the raw compute.Operation without wrapping it in
    # an ExtendedOperation (see add_rule below for the wrapped variant).
    def add_rule_unary(self,
            request: Optional[Union[compute.AddRuleNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Inserts a rule into a firewall policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_rule():
                # Create a client
                client = compute_v1.NetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.AddRuleNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                )

                # Make the request
                response = client.add_rule(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddRuleNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                NetworkFirewallPolicies.AddRule. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
+ firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddRuleNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddRuleNetworkFirewallPolicyRequest): + request = compute.AddRuleNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.add_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def add_rule(self, + request: Optional[Union[compute.AddRuleNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Inserts a rule into a firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddRuleNetworkFirewallPolicyRequest, dict]): + The request object. 
A request message for + NetworkFirewallPolicies.AddRule. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddRuleNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.AddRuleNetworkFirewallPolicyRequest): + request = compute.AddRuleNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def clone_rules_unary(self, + request: Optional[Union[compute.CloneRulesNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Copies rules to the specified firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_clone_rules(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.CloneRulesNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.clone_rules(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CloneRulesNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.CloneRules. See + the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CloneRulesNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.CloneRulesNetworkFirewallPolicyRequest): + request = compute.CloneRulesNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.clone_rules] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def clone_rules(self, + request: Optional[Union[compute.CloneRulesNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Copies rules to the specified firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_clone_rules(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.CloneRulesNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.clone_rules(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CloneRulesNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.CloneRules. See + the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CloneRulesNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.CloneRulesNetworkFirewallPolicyRequest): + request = compute.CloneRulesNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.clone_rules] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + delete. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNetworkFirewallPolicyRequest): + request = compute.DeleteNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + delete. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNetworkFirewallPolicyRequest): + request = compute.DeleteNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicy: + r"""Returns the specified network firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.Get. See the + method description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to get. + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicy: + Represents a Firewall Policy + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetNetworkFirewallPolicyRequest): + request = compute.GetNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_association(self, + request: Optional[Union[compute.GetAssociationNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicyAssociation: + r"""Gets an association with the specified name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_association(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetAssociationNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.get_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetAssociationNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.GetAssociation. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to which + the queried association belongs. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicyAssociation: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetAssociationNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetAssociationNetworkFirewallPolicyRequest): + request = compute.GetAssociationNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_association] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyNetworkFirewallPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.GetIamPolicy. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicyNetworkFirewallPolicyRequest): + request = compute.GetIamPolicyNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_rule(self, + request: Optional[Union[compute.GetRuleNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicyRule: + r"""Gets a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRuleNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.GetRule. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to which + the queried rule belongs. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRuleNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRuleNetworkFirewallPolicyRequest): + request = compute.GetRuleNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkFirewallPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkFirewallPolicyRequest): + request = compute.InsertNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkFirewallPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkFirewallPolicyRequest): + request = compute.InsertNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListNetworkFirewallPoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists all the policies that have been configured for + the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkFirewallPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNetworkFirewallPoliciesRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.network_firewall_policies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNetworkFirewallPoliciesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNetworkFirewallPoliciesRequest): + request = compute.ListNetworkFirewallPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified policy with the data included + in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchNetworkFirewallPolicyRequest): + request = compute.PatchNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch(self, + request: Optional[Union[compute.PatchNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified policy with the data included + in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchNetworkFirewallPolicyRequest): + request = compute.PatchNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def patch_rule_unary(self, + request: Optional[Union[compute.PatchRuleNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches a rule of the specified priority. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRuleNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.PatchRule. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRuleNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRuleNetworkFirewallPolicyRequest): + request = compute.PatchRuleNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_rule(self, + request: Optional[Union[compute.PatchRuleNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRuleNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.PatchRule. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRuleNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRuleNetworkFirewallPolicyRequest): + request = compute.PatchRuleNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.patch_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def remove_association_unary(self, + request: Optional[Union[compute.RemoveAssociationNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes an association for the specified firewall + policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_association(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveAssociationNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.remove_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveAssociationNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.RemoveAssociation. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveAssociationNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveAssociationNetworkFirewallPolicyRequest): + request = compute.RemoveAssociationNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def remove_association(self, + request: Optional[Union[compute.RemoveAssociationNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes an association for the specified firewall + policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_association(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveAssociationNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.remove_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveAssociationNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.RemoveAssociation. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveAssociationNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveAssociationNetworkFirewallPolicyRequest): + request = compute.RemoveAssociationNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def remove_rule_unary(self, + request: Optional[Union[compute.RemoveRuleNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveRuleNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.RemoveRule. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveRuleNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveRuleNetworkFirewallPolicyRequest): + request = compute.RemoveRuleNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_rule(self, + request: Optional[Union[compute.RemoveRuleNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveRuleNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.RemoveRule. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveRuleNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveRuleNetworkFirewallPolicyRequest): + request = compute.RemoveRuleNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_policy_request_resource: Optional[compute.GlobalSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyNetworkFirewallPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.SetIamPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. 
A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicyNetworkFirewallPolicyRequest): + request = compute.SetIamPolicyNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_policy_request_resource is not None: + request.global_set_policy_request_resource = global_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNetworkFirewallPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. 
+ + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsNetworkFirewallPolicyRequest): + request = compute.TestIamPermissionsNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "NetworkFirewallPoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "NetworkFirewallPoliciesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/pagers.py new file mode 100644 index 000000000..faf3fbb98 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/network_firewall_policies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
class ListPager:
    """A pager for iterating through ``list`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.compute_v1.types.FirewallPolicyList` object, and
    provides an ``__iter__`` method to iterate through its
    ``items`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``List`` requests and continue to iterate
    through the ``items`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.compute_v1.types.FirewallPolicyList`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., compute.FirewallPolicyList],
            request: compute.ListNetworkFirewallPoliciesRequest,
            response: compute.FirewallPolicyList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListNetworkFirewallPoliciesRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.FirewallPolicyList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token updates never mutate the caller's
        # object.
        self._request = compute.ListNetworkFirewallPoliciesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.FirewallPolicyList]:
        # Yield the initial page, then keep fetching while the server
        # advertises a continuation token.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[compute.FirewallPolicy]:
        # Flatten the per-page ``items`` fields into a single stream.
        for page in self.pages:
            for item in page.items:
                yield item

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
#
from collections import OrderedDict
from typing import Dict, Type

from .base import NetworkFirewallPoliciesTransport
from .rest import NetworkFirewallPoliciesRestTransport
from .rest import NetworkFirewallPoliciesRestInterceptor


# Registry of available transports, keyed by transport name.  'rest' is
# the only (and therefore default) transport for this service.
_transport_registry: Dict[str, Type[NetworkFirewallPoliciesTransport]] = OrderedDict()
_transport_registry['rest'] = NetworkFirewallPoliciesRestTransport

__all__ = (
    'NetworkFirewallPoliciesTransport',
    'NetworkFirewallPoliciesRestTransport',
    'NetworkFirewallPoliciesRestInterceptor',
)
#
import abc
# ``Any`` added: it is required by the ``_extended_operations_services``
# annotation below but was missing from the generated import list.
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute
from google.cloud.compute_v1.services import global_operations

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class NetworkFirewallPoliciesTransport(abc.ABC):
    """Abstract transport class for NetworkFirewallPolicies."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The audience to apply to GDC-H
                credentials; defaults to ``host`` when not provided.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are supplied.
        """
        # Lazily-populated cache of clients used to poll extended
        # (long-running) operations; see _global_operations_client.
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_methods(self, client_info):
        # Precompute the wrapped methods.  Every RPC in this service uses
        # the same default wrapping (no default timeout), so one
        # comprehension replaces sixteen identical wrap_method calls.
        self._wrapped_methods = {
            method: gapic_v1.method.wrap_method(
                method,
                default_timeout=None,
                client_info=client_info,
            )
            for method in (
                self.add_association,
                self.add_rule,
                self.clone_rules,
                self.delete,
                self.get,
                self.get_association,
                self.get_iam_policy,
                self.get_rule,
                self.insert,
                self.list,
                self.patch,
                self.patch_rule,
                self.remove_association,
                self.remove_rule,
                self.set_iam_policy,
                self.test_iam_permissions,
            )
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    # The properties below define the RPC surface of the transport.  Each
    # one must be implemented by a concrete transport (e.g. REST) and
    # returns a callable taking the request message to either a response
    # message (sync) or an awaitable of one (async).

    @property
    def add_association(self) -> Callable[
            [compute.AddAssociationNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def add_rule(self) -> Callable[
            [compute.AddRuleNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def clone_rules(self) -> Callable[
            [compute.CloneRulesNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def delete(self) -> Callable[
            [compute.DeleteNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetNetworkFirewallPolicyRequest],
            Union[
                compute.FirewallPolicy,
                Awaitable[compute.FirewallPolicy]
            ]]:
        raise NotImplementedError()

    @property
    def get_association(self) -> Callable[
            [compute.GetAssociationNetworkFirewallPolicyRequest],
            Union[
                compute.FirewallPolicyAssociation,
                Awaitable[compute.FirewallPolicyAssociation]
            ]]:
        raise NotImplementedError()

    @property
    def get_iam_policy(self) -> Callable[
            [compute.GetIamPolicyNetworkFirewallPolicyRequest],
            Union[
                compute.Policy,
                Awaitable[compute.Policy]
            ]]:
        raise NotImplementedError()

    @property
    def get_rule(self) -> Callable[
            [compute.GetRuleNetworkFirewallPolicyRequest],
            Union[
                compute.FirewallPolicyRule,
                Awaitable[compute.FirewallPolicyRule]
            ]]:
        raise NotImplementedError()

    @property
    def insert(self) -> Callable[
            [compute.InsertNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListNetworkFirewallPoliciesRequest],
            Union[
                compute.FirewallPolicyList,
                Awaitable[compute.FirewallPolicyList]
            ]]:
        raise NotImplementedError()

    @property
    def patch(self) -> Callable[
            [compute.PatchNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def patch_rule(self) -> Callable[
            [compute.PatchRuleNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def remove_association(self) -> Callable[
            [compute.RemoveAssociationNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def remove_rule(self) -> Callable[
            [compute.RemoveRuleNetworkFirewallPolicyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def set_iam_policy(self) -> Callable[
            [compute.SetIamPolicyNetworkFirewallPolicyRequest],
            Union[
                compute.Policy,
                Awaitable[compute.Policy]
            ]]:
        raise NotImplementedError()

    @property
    def test_iam_permissions(self) -> Callable[
            [compute.TestIamPermissionsNetworkFirewallPolicyRequest],
            Union[
                compute.TestPermissionsResponse,
                Awaitable[compute.TestPermissionsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport-kind string (e.g. 'rest'); concrete subclasses
        # must provide it.
        raise NotImplementedError()

    @property
    def _global_operations_client(self) -> global_operations.GlobalOperationsClient:
        # Build (and cache) a GlobalOperationsClient sharing this
        # transport's credentials, used to poll extended operations.
        ex_op_service = self._extended_operations_services.get("global_operations")
        if not ex_op_service:
            ex_op_service = global_operations.GlobalOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["global_operations"] = ex_op_service

        return ex_op_service


__all__ = (
    'NetworkFirewallPoliciesTransport',
)
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import NetworkFirewallPoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class NetworkFirewallPoliciesRestInterceptor: + """Interceptor for NetworkFirewallPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the NetworkFirewallPoliciesRestTransport. + + .. 
code-block:: python + class MyCustomNetworkFirewallPoliciesInterceptor(NetworkFirewallPoliciesRestInterceptor): + def pre_add_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_add_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_clone_rules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_clone_rules(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = NetworkFirewallPoliciesRestTransport(interceptor=MyCustomNetworkFirewallPoliciesInterceptor()) + client = NetworkFirewallPoliciesClient(transport=transport) + + + """ + def 
pre_add_association(self, request: compute.AddAssociationNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddAssociationNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkFirewallPolicies server. + """ + return request, metadata + + def post_add_association(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_association + + Override in a subclass to manipulate the response + after it is returned by the NetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_add_rule(self, request: compute.AddRuleNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddRuleNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkFirewallPolicies server. + """ + return request, metadata + + def post_add_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_rule + + Override in a subclass to manipulate the response + after it is returned by the NetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_clone_rules(self, request: compute.CloneRulesNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.CloneRulesNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for clone_rules + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkFirewallPolicies server. 
        """
        return request, metadata

    # -- Interceptor hook pairs -------------------------------------------
    # Each RPC exposes a pre_* hook (runs before the request is transcoded
    # and sent) and a post_* hook (runs on the parsed response before it is
    # handed back to user code). The defaults are identity functions.

    def post_clone_rules(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for clone_rules

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_delete(self, request: compute.DeleteNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for delete

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_delete(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for delete

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_get(self, request: compute.GetNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_get(self, response: compute.FirewallPolicy) -> compute.FirewallPolicy:
        """Post-rpc interceptor for get

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_get_association(self, request: compute.GetAssociationNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetAssociationNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get_association

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_get_association(self, response: compute.FirewallPolicyAssociation) -> compute.FirewallPolicyAssociation:
        """Post-rpc interceptor for get_association

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_get_iam_policy(self, request: compute.GetIamPolicyNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get_iam_policy

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy:
        """Post-rpc interceptor for get_iam_policy

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_get_rule(self, request: compute.GetRuleNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRuleNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get_rule

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_get_rule(self, response: compute.FirewallPolicyRule) -> compute.FirewallPolicyRule:
        """Post-rpc interceptor for get_rule

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_insert(self, request: compute.InsertNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for insert

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_insert(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for insert

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_list(self, request: compute.ListNetworkFirewallPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNetworkFirewallPoliciesRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_list(self, response: compute.FirewallPolicyList) -> compute.FirewallPolicyList:
        """Post-rpc interceptor for list

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_patch(self, request: compute.PatchNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for patch

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_patch(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for patch

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_patch_rule(self, request: compute.PatchRuleNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRuleNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for patch_rule

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_patch_rule(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for patch_rule

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_remove_association(self, request: compute.RemoveAssociationNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveAssociationNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for remove_association

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_remove_association(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for remove_association

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_remove_rule(self, request: compute.RemoveRuleNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveRuleNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for remove_rule

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_remove_rule(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for remove_rule

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_set_iam_policy(self, request: compute.SetIamPolicyNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for set_iam_policy

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy:
        """Post-rpc interceptor for set_iam_policy

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response

    def pre_test_iam_permissions(self, request: compute.TestIamPermissionsNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for test_iam_permissions

        Override in a subclass to manipulate the request or metadata
        before they are sent to the NetworkFirewallPolicies server.
        """
        return request, metadata

    def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse:
        """Post-rpc interceptor for test_iam_permissions

        Override in a subclass to manipulate the response
        after it is returned by the NetworkFirewallPolicies server but before
        it is returned to user code.
        """
        return response


@dataclasses.dataclass
class NetworkFirewallPoliciesRestStub:
    # Shared state handed to every per-RPC stub class below: the authorized
    # HTTP session, the resolved API host, and the active interceptor.
    _session: AuthorizedSession
    _host: str
    _interceptor: NetworkFirewallPoliciesRestInterceptor


class NetworkFirewallPoliciesRestTransport(NetworkFirewallPoliciesTransport):
    """REST backend transport for NetworkFirewallPolicies.

    The NetworkFirewallPolicies API.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends JSON representations of protocol buffers over HTTP/1.1

    NOTE: This REST transport functionality is currently in a beta
    state (preview). We welcome your feedback via an issue in this
    library's source repository. Thank you!
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[NetworkFirewallPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or NetworkFirewallPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddAssociation(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("AddAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddAssociationNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add association method over HTTP. + + Args: + request (~.compute.AddAssociationNetworkFirewallPolicyRequest): + The request object. 
                    A request message for
                    NetworkFirewallPolicies.AddAssociation.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            # Static binding of this RPC to its REST verb, URI template and
            # request-body field.
            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/addAssociation',
                'body': 'firewall_policy_association_resource',
            },
            ]
            # Let the interceptor rewrite request/metadata before transcoding.
            request, metadata = self._interceptor.pre_add_association(request, metadata)
            pb_request = compute.AddAssociationNetworkFirewallPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_add_association(resp)
            return resp

    class _AddRule(NetworkFirewallPoliciesRestStub):
        def __hash__(self):
            return hash("AddRule")

        # Required query params with proto default values (none for this RPC).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.AddRuleNetworkFirewallPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the add rule method over HTTP.

            Args:
                request (~.compute.AddRuleNetworkFirewallPolicyRequest):
                    The request object. A request message for
                    NetworkFirewallPolicies.AddRule. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource.
                    Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            # Static binding of this RPC to its REST verb, URI template and
            # request-body field.
            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/addRule',
                'body': 'firewall_policy_rule_resource',
            },
            ]
            request, metadata = self._interceptor.pre_add_rule(request, metadata)
            pb_request = compute.AddRuleNetworkFirewallPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_add_rule(resp)
            return resp

    class _CloneRules(NetworkFirewallPoliciesRestStub):
        def __hash__(self):
            return hash("CloneRules")

        # Required query params with proto default values (none for this RPC).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.CloneRulesNetworkFirewallPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the clone rules method over HTTP.

            Args:
                request (~.compute.CloneRulesNetworkFirewallPolicyRequest):
                    The request object. A request message for
                    NetworkFirewallPolicies.CloneRules. See
                    the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource.
                    For more
                    information, read Global, Regional, and Zonal Resources.

            """

            # No request body for this RPC: the POST carries only path and
            # query parameters.
            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/cloneRules',
            },
            ]
            request, metadata = self._interceptor.pre_clone_rules(request, metadata)
            pb_request = compute.CloneRulesNetworkFirewallPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_clone_rules(resp)
            return resp

    class _Delete(NetworkFirewallPoliciesRestStub):
        def __hash__(self):
            return hash("Delete")

        # Required query params with proto default values (none for this RPC).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.DeleteNetworkFirewallPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the delete method over HTTP.

            Args:
                request (~.compute.DeleteNetworkFirewallPolicyRequest):
                    The request object. A request message for
                    NetworkFirewallPolicies.Delete. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            # DELETE on the policy resource itself; no request body.
            http_options: List[Dict[str, str]] = [{
                'method': 'delete',
                'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}',
            },
            ]
            request, metadata = self._interceptor.pre_delete(request, metadata)
            pb_request = compute.DeleteNetworkFirewallPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_delete(resp)
            return resp

    class _Get(NetworkFirewallPoliciesRestStub):
        def __hash__(self):
            return hash("Get")

        # Required query params with proto default values (none for this RPC).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetNetworkFirewallPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.FirewallPolicy:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetNetworkFirewallPolicyRequest):
                    The request object. A request message for
                    NetworkFirewallPolicies.Get. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.FirewallPolicy:
                    Represents a Firewall Policy
                resource.

            """

            # Simple GET on the policy resource; no request body.
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}',
            },
            ]
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetNetworkFirewallPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.FirewallPolicy()
            pb_resp = compute.FirewallPolicy.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp

    class _GetAssociation(NetworkFirewallPoliciesRestStub):
        def __hash__(self):
            return hash("GetAssociation")

        # Required query params with proto default values (none for this RPC).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetAssociationNetworkFirewallPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.FirewallPolicyAssociation:
            r"""Call the get association method over HTTP.

            Args:
                request (~.compute.GetAssociationNetworkFirewallPolicyRequest):
                    The request object. A request message for
                    NetworkFirewallPolicies.GetAssociation.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.FirewallPolicyAssociation:

            """

            # GET against the getAssociation custom verb; no request body.
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/getAssociation',
            },
            ]
            request, metadata = self._interceptor.pre_get_association(request, metadata)
            pb_request = compute.GetAssociationNetworkFirewallPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.FirewallPolicyAssociation()
            pb_resp = compute.FirewallPolicyAssociation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get_association(resp)
            return resp

    class _GetIamPolicy(NetworkFirewallPoliciesRestStub):
        def __hash__(self):
            return hash("GetIamPolicy")

        # Required query params with proto default values (none for this RPC).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetIamPolicyNetworkFirewallPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Policy:
            r"""Call the get iam policy method over HTTP.

            Args:
                request (~.compute.GetIamPolicyNetworkFirewallPolicyRequest):
                    The request object. A request message for
                    NetworkFirewallPolicies.GetIamPolicy.
                    See the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Policy:
                    An Identity and Access Management (IAM) policy, which
                    specifies access controls for Google Cloud resources. A
                    ``Policy`` is a collection of ``bindings``. A
                    ``binding`` binds one or more ``members``, or
                    principals, to a single ``role``. Principals can be user
                    accounts, service accounts, Google groups, and domains
                    (such as G Suite). A ``role`` is a named list of
                    permissions; each ``role`` can be an IAM predefined role
                    or a user-created custom role.
                    For some types of Google
                    Cloud resources, a ``binding`` can also specify a
                    ``condition``, which is a logical expression that allows
                    access to a resource only if the expression evaluates to
                    ``true``. A condition can add constraints based on
                    attributes of the request, the resource, or both. To
                    learn which resources support conditions in their IAM
                    policies, see the `IAM
                    documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
                    **JSON example:** { "bindings": [ { "role":
                    "roles/resourcemanager.organizationAdmin", "members": [
                    "user:mike@example.com", "group:admins@example.com",
                    "domain:google.com",
                    "serviceAccount:my-project-id@appspot.gserviceaccount.com"
                    ] }, { "role":
                    "roles/resourcemanager.organizationViewer", "members": [
                    "user:eve@example.com" ], "condition": { "title":
                    "expirable access", "description": "Does not grant
                    access after Sep 2020", "expression": "request.time <
                    timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag":
                    "BwWWja0YfJA=", "version": 3 } **YAML example:**
                    bindings: - members: - user:mike@example.com -
                    group:admins@example.com - domain:google.com -
                    serviceAccount:my-project-id@appspot.gserviceaccount.com
                    role: roles/resourcemanager.organizationAdmin - members:
                    - user:eve@example.com role:
                    roles/resourcemanager.organizationViewer condition:
                    title: expirable access description: Does not grant
                    access after Sep 2020 expression: request.time <
                    timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA=
                    version: 3 For a description of IAM and its features,
                    see the `IAM
                    documentation <https://cloud.google.com/iam/docs/>`__.

            """

            # NOTE: this URI uses {resource}, not {firewall_policy} — the IAM
            # RPCs address the policy as a generic resource.
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{resource}/getIamPolicy',
            },
            ]
            request, metadata = self._interceptor.pre_get_iam_policy(request, metadata)
            pb_request = compute.GetIamPolicyNetworkFirewallPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _GetRule(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("GetRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRuleNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicyRule: + r"""Call the get rule method over HTTP. + + Args: + request (~.compute.GetRuleNetworkFirewallPolicyRequest): + The request object. A request message for + NetworkFirewallPolicies.GetRule. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.FirewallPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/getRule', + }, + ] + request, metadata = self._interceptor.pre_get_rule(request, metadata) + pb_request = compute.GetRuleNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicyRule() + pb_resp = compute.FirewallPolicyRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_rule(resp) + return resp + + class _Insert(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertNetworkFirewallPolicyRequest): + The request object. A request message for + NetworkFirewallPolicies.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies', + 'body': 'firewall_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNetworkFirewallPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListNetworkFirewallPoliciesRequest): + The request object. A request message for + NetworkFirewallPolicies.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.FirewallPolicyList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListNetworkFirewallPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicyList() + pb_resp = compute.FirewallPolicyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchNetworkFirewallPolicyRequest): + The request object. A request message for + NetworkFirewallPolicies.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}', + 'body': 'firewall_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _PatchRule(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("PatchRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRuleNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch rule method over HTTP. + + Args: + request (~.compute.PatchRuleNetworkFirewallPolicyRequest): + The request object. A request message for + NetworkFirewallPolicies.PatchRule. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/patchRule', + 'body': 'firewall_policy_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_patch_rule(request, metadata) + pb_request = compute.PatchRuleNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch_rule(resp) + return resp + + class _RemoveAssociation(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("RemoveAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveAssociationNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove association method over HTTP. + + Args: + request (~.compute.RemoveAssociationNetworkFirewallPolicyRequest): + The request object. A request message for + NetworkFirewallPolicies.RemoveAssociation. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/removeAssociation', + }, + ] + request, metadata = self._interceptor.pre_remove_association(request, metadata) + pb_request = compute.RemoveAssociationNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_association(resp) + return resp + + class _RemoveRule(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("RemoveRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveRuleNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove rule method over HTTP. + + Args: + request (~.compute.RemoveRuleNetworkFirewallPolicyRequest): + The request object. A request message for + NetworkFirewallPolicies.RemoveRule. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/removeRule', + }, + ] + request, metadata = self._interceptor.pre_remove_rule(request, metadata) + pb_request = compute.RemoveRuleNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_rule(resp) + return resp + + class _SetIamPolicy(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyNetworkFirewallPolicyRequest): + The request object. A request message for + NetworkFirewallPolicies.SetIamPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{resource}/setIamPolicy', + 'body': 'global_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(NetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsNetworkFirewallPolicyRequest): + The request object. A request message for + NetworkFirewallPolicies.TestIamPermissions. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/firewallPolicies/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            # NOTE(review): non-2xx HTTP responses are surfaced as google.api_core exceptions.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = compute.TestPermissionsResponse()
+            pb_resp = compute.TestPermissionsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_test_iam_permissions(resp)
+            return resp
+
+    @property
+    def add_association(self) -> Callable[
+            [compute.AddAssociationNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._AddAssociation(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def add_rule(self) -> Callable[
+            [compute.AddRuleNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._AddRule(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def clone_rules(self) -> Callable[
+            [compute.CloneRulesNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._CloneRules(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def delete(self) -> Callable[
+            [compute.DeleteNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._Delete(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get(self) -> Callable[
+            [compute.GetNetworkFirewallPolicyRequest],
+            compute.FirewallPolicy]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._Get(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_association(self) -> Callable[
+            [compute.GetAssociationNetworkFirewallPolicyRequest],
+            compute.FirewallPolicyAssociation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._GetAssociation(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_iam_policy(self) -> Callable[
+            [compute.GetIamPolicyNetworkFirewallPolicyRequest],
+            compute.Policy]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_rule(self) -> Callable[
+            [compute.GetRuleNetworkFirewallPolicyRequest],
+            compute.FirewallPolicyRule]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._GetRule(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def insert(self) -> Callable[
+            [compute.InsertNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._Insert(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def list(self) -> Callable[
+            [compute.ListNetworkFirewallPoliciesRequest],
+            compute.FirewallPolicyList]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._List(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def patch(self) -> Callable[
+            [compute.PatchNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._Patch(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def patch_rule(self) -> Callable[
+            [compute.PatchRuleNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._PatchRule(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def remove_association(self) -> Callable[
+            [compute.RemoveAssociationNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._RemoveAssociation(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def remove_rule(self) -> Callable[
+            [compute.RemoveRuleNetworkFirewallPolicyRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._RemoveRule(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def set_iam_policy(self) -> Callable[
+            [compute.SetIamPolicyNetworkFirewallPolicyRequest],
+            compute.Policy]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def test_iam_permissions(self) -> Callable[
+            [compute.TestIamPermissionsNetworkFirewallPolicyRequest],
+            compute.TestPermissionsResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def kind(self) -> str:
+        # Identifies this transport implementation ("rest") in the client's transport registry.
+        return "rest"
+
+    def close(self):
+        # Close the underlying HTTP session.
+        self._session.close()
+
+
+__all__=(
+    'NetworkFirewallPoliciesRestTransport',
+)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/__init__.py
new file mode 100644
index 000000000..7815bd4c1
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/__init__.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from .client import NetworksClient + +__all__ = ( + 'NetworksClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/client.py new file mode 100644 index 000000000..c391680c6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/client.py @@ -0,0 +1,2640 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = 
Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.networks import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import NetworksTransport, DEFAULT_CLIENT_INFO +from .transports.rest import NetworksRestTransport + + +class NetworksClientMeta(type): + """Metaclass for the Networks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[NetworksTransport]] + _transport_registry["rest"] = NetworksRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[NetworksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class NetworksClient(metaclass=NetworksClientMeta): + """The Networks API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    # Derived from DEFAULT_ENDPOINT at class-creation time via the staticmethod above.
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            NetworksClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            NetworksClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    # Alias: both spellings accept a service-account JSON key file.
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> NetworksTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            NetworksTransport: The transport used by the client
+            instance.
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, NetworksTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the networks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, NetworksTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NetworksTransport): + # transport is a NetworksTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def add_peering_unary(self, + request: Optional[Union[compute.AddPeeringNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + networks_add_peering_request_resource: Optional[compute.NetworksAddPeeringRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Adds a peering to the specified network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_peering(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.AddPeeringNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.add_peering(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddPeeringNetworkRequest, dict]): + The request object. A request message for + Networks.AddPeering. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network resource to add + peering to. + + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + networks_add_peering_request_resource (google.cloud.compute_v1.types.NetworksAddPeeringRequest): + The body resource for this request + This corresponds to the ``networks_add_peering_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, network, networks_add_peering_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddPeeringNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddPeeringNetworkRequest): + request = compute.AddPeeringNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + if networks_add_peering_request_resource is not None: + request.networks_add_peering_request_resource = networks_add_peering_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_peering(self, + request: Optional[Union[compute.AddPeeringNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + networks_add_peering_request_resource: Optional[compute.NetworksAddPeeringRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Adds a peering to the specified network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_peering(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.AddPeeringNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.add_peering(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddPeeringNetworkRequest, dict]): + The request object. A request message for + Networks.AddPeering. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network resource to add + peering to. + + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ networks_add_peering_request_resource (google.cloud.compute_v1.types.NetworksAddPeeringRequest): + The body resource for this request + This corresponds to the ``networks_add_peering_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network, networks_add_peering_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddPeeringNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddPeeringNetworkRequest): + request = compute.AddPeeringNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + if networks_add_peering_request_resource is not None: + request.networks_add_peering_request_resource = networks_add_peering_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.add_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNetworkRequest, dict]): + The request object. A request message for + Networks.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network to delete. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, network]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNetworkRequest): + request = compute.DeleteNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNetworkRequest, dict]): + The request object. A request message for + Networks.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network to delete. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNetworkRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNetworkRequest): + request = compute.DeleteNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Network: + r"""Returns the specified network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNetworkRequest, dict]): + The request object. A request message for Networks.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network to return. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.Network: + Represents a VPC Network resource. + Networks connect resources to each other + and to the internet. For more + information, read Virtual Private Cloud + (VPC) Network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetNetworkRequest): + request = compute.GetNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_effective_firewalls(self, + request: Optional[Union[compute.GetEffectiveFirewallsNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NetworksGetEffectiveFirewallsResponse: + r"""Returns the effective firewalls on a given network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_effective_firewalls(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.GetEffectiveFirewallsNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.get_effective_firewalls(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetEffectiveFirewallsNetworkRequest, dict]): + The request object. A request message for + Networks.GetEffectiveFirewalls. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network for this request. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NetworksGetEffectiveFirewallsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetEffectiveFirewallsNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetEffectiveFirewallsNetworkRequest): + request = compute.GetEffectiveFirewallsNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_effective_firewalls] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network_resource: Optional[compute.Network] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a network in the specified project using the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkRequest, dict]): + The request object. A request message for + Networks.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_resource (google.cloud.compute_v1.types.Network): + The body resource for this request + This corresponds to the ``network_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkRequest): + request = compute.InsertNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_resource is not None: + request.network_resource = network_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network_resource: Optional[compute.Network] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a network in the specified project using the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNetworkRequest, dict]): + The request object. A request message for + Networks.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_resource (google.cloud.compute_v1.types.Network): + The body resource for this request + This corresponds to the ``network_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNetworkRequest): + request = compute.InsertNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network_resource is not None: + request.network_resource = network_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListNetworksRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of networks available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworksRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNetworksRequest, dict]): + The request object. A request message for Networks.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.networks.pagers.ListPager: + Contains a list of networks. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNetworksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.ListNetworksRequest): + request = compute.ListNetworksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_peering_routes(self, + request: Optional[Union[compute.ListPeeringRoutesNetworksRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPeeringRoutesPager: + r"""Lists the peering routes exchanged over peering + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_peering_routes(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.ListPeeringRoutesNetworksRequest( + network="network_value", + project="project_value", + ) + + # Make the request + page_result = client.list_peering_routes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListPeeringRoutesNetworksRequest, dict]): + The request object. A request message for + Networks.ListPeeringRoutes. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network for this request. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.networks.pagers.ListPeeringRoutesPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, network]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListPeeringRoutesNetworksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListPeeringRoutesNetworksRequest): + request = compute.ListPeeringRoutesNetworksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_peering_routes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPeeringRoutesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + network_resource: Optional[compute.Network] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified network with the data included + in the request. Only the following fields can be + modified: routingConfig.routingMode. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchNetworkRequest, dict]): + The request object. A request message for Networks.Patch. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network to update. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ network_resource (google.cloud.compute_v1.types.Network): + The body resource for this request + This corresponds to the ``network_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network, network_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchNetworkRequest): + request = compute.PatchNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + if network_resource is not None: + request.network_resource = network_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + network_resource: Optional[compute.Network] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified network with the data included + in the request. Only the following fields can be + modified: routingConfig.routingMode. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchNetworkRequest, dict]): + The request object. A request message for Networks.Patch. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network to update. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_resource (google.cloud.compute_v1.types.Network): + The body resource for this request + This corresponds to the ``network_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network, network_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchNetworkRequest): + request = compute.PatchNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if network is not None: + request.network = network + if network_resource is not None: + request.network_resource = network_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def remove_peering_unary(self, + request: Optional[Union[compute.RemovePeeringNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + networks_remove_peering_request_resource: Optional[compute.NetworksRemovePeeringRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes a peering from the specified network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_peering(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.RemovePeeringNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.remove_peering(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemovePeeringNetworkRequest, dict]): + The request object. A request message for + Networks.RemovePeering. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network resource to + remove peering from. + + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ networks_remove_peering_request_resource (google.cloud.compute_v1.types.NetworksRemovePeeringRequest):
+ The body resource for this request
+ This corresponds to the ``networks_remove_peering_request_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An Operation resource, used to manage
+ asynchronous API requests.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, network, networks_remove_peering_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.RemovePeeringNetworkRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.RemovePeeringNetworkRequest):
+ request = compute.RemovePeeringNetworkRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if network is not None:
+ request.network = network
+ if networks_remove_peering_request_resource is not None:
+ request.networks_remove_peering_request_resource = networks_remove_peering_request_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.remove_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_peering(self, + request: Optional[Union[compute.RemovePeeringNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + networks_remove_peering_request_resource: Optional[compute.NetworksRemovePeeringRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes a peering from the specified network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_peering(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.RemovePeeringNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.remove_peering(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemovePeeringNetworkRequest, dict]): + The request object. A request message for + Networks.RemovePeering. 
See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network resource to + remove peering from. + + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + networks_remove_peering_request_resource (google.cloud.compute_v1.types.NetworksRemovePeeringRequest): + The body resource for this request + This corresponds to the ``networks_remove_peering_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network, networks_remove_peering_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemovePeeringNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemovePeeringNetworkRequest): + request = compute.RemovePeeringNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if network is not None: + request.network = network + if networks_remove_peering_request_resource is not None: + request.networks_remove_peering_request_resource = networks_remove_peering_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def switch_to_custom_mode_unary(self, + request: Optional[Union[compute.SwitchToCustomModeNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Switches the network mode from auto subnet mode to + custom subnet mode. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_switch_to_custom_mode(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.SwitchToCustomModeNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.switch_to_custom_mode(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SwitchToCustomModeNetworkRequest, dict]): + The request object. A request message for + Networks.SwitchToCustomMode. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network to be updated. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An Operation resource, used to manage
+ asynchronous API requests.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, network])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SwitchToCustomModeNetworkRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SwitchToCustomModeNetworkRequest):
+ request = compute.SwitchToCustomModeNetworkRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if network is not None:
+ request.network = network
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.switch_to_custom_mode]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("network", request.network),
+ )),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response. 
+ return response + + def switch_to_custom_mode(self, + request: Optional[Union[compute.SwitchToCustomModeNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Switches the network mode from auto subnet mode to + custom subnet mode. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_switch_to_custom_mode(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.SwitchToCustomModeNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.switch_to_custom_mode(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SwitchToCustomModeNetworkRequest, dict]): + The request object. A request message for + Networks.SwitchToCustomMode. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network to be updated. + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SwitchToCustomModeNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SwitchToCustomModeNetworkRequest): + request = compute.SwitchToCustomModeNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.switch_to_custom_mode] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def update_peering_unary(self, + request: Optional[Union[compute.UpdatePeeringNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + networks_update_peering_request_resource: Optional[compute.NetworksUpdatePeeringRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified network peering with the data included in + the request. You can only modify the + NetworkPeering.export_custom_routes field and the + NetworkPeering.import_custom_routes field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_update_peering():
+ # Create a client
+ client = compute_v1.NetworksClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.UpdatePeeringNetworkRequest(
+ network="network_value",
+ project="project_value",
+ )
+
+ # Make the request
+ response = client.update_peering(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.UpdatePeeringNetworkRequest, dict]):
+ The request object. A request message for
+ Networks.UpdatePeering. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ network (str):
+ Name of the network resource which
+ the updated peering is belonging to.
+
+ This corresponds to the ``network`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ networks_update_peering_request_resource (google.cloud.compute_v1.types.NetworksUpdatePeeringRequest):
+ The body resource for this request
+ This corresponds to the ``networks_update_peering_request_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An Operation resource, used to manage
+ asynchronous API requests. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network, networks_update_peering_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdatePeeringNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdatePeeringNetworkRequest): + request = compute.UpdatePeeringNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if network is not None: + request.network = network + if networks_update_peering_request_resource is not None: + request.networks_update_peering_request_resource = networks_update_peering_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_peering(self, + request: Optional[Union[compute.UpdatePeeringNetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + network: Optional[str] = None, + networks_update_peering_request_resource: Optional[compute.NetworksUpdatePeeringRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified network peering with the data included in + the request. You can only modify the + NetworkPeering.export_custom_routes field and the + NetworkPeering.import_custom_routes field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_peering(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.UpdatePeeringNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.update_peering(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdatePeeringNetworkRequest, dict]): + The request object. A request message for + Networks.UpdatePeering. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Name of the network resource which + the updated peering is belonging to. 
+ + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + networks_update_peering_request_resource (google.cloud.compute_v1.types.NetworksUpdatePeeringRequest): + The body resource for this request + This corresponds to the ``networks_update_peering_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, network, networks_update_peering_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdatePeeringNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdatePeeringNetworkRequest): + request = compute.UpdatePeeringNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if network is not None: + request.network = network + if networks_update_peering_request_resource is not None: + request.networks_update_peering_request_resource = networks_update_peering_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_peering] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("network", request.network), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "NetworksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "NetworksClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/pagers.py new file mode 100644 index 000000000..e9776830a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/pagers.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NetworkList], + request: compute.ListNetworksRequest, + response: compute.NetworkList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListNetworksRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListNetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Network]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPeeringRoutesPager: + """A pager for iterating through ``list_peering_routes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ExchangedPeeringRoutesList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPeeringRoutes`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.compute_v1.types.ExchangedPeeringRoutesList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ExchangedPeeringRoutesList], + request: compute.ListPeeringRoutesNetworksRequest, + response: compute.ExchangedPeeringRoutesList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListPeeringRoutesNetworksRequest): + The initial request object. + response (google.cloud.compute_v1.types.ExchangedPeeringRoutesList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListPeeringRoutesNetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ExchangedPeeringRoutesList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.ExchangedPeeringRoute]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/__init__.py new file mode 100644 index 000000000..2308dc8fd --- /dev/null +++ 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import NetworksTransport +from .rest import NetworksRestTransport +from .rest import NetworksRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[NetworksTransport]] +_transport_registry['rest'] = NetworksRestTransport + +__all__ = ( + 'NetworksTransport', + 'NetworksRestTransport', + 'NetworksRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/base.py new file mode 100644 index 000000000..c989a4969 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/base.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class NetworksTransport(abc.ABC): + """Abstract transport class for Networks.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_peering: gapic_v1.method.wrap_method( + self.add_peering, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_effective_firewalls: gapic_v1.method.wrap_method( + self.get_effective_firewalls, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_peering_routes: gapic_v1.method.wrap_method( + self.list_peering_routes, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.remove_peering: gapic_v1.method.wrap_method( + self.remove_peering, + default_timeout=None, + client_info=client_info, + ), + self.switch_to_custom_mode: gapic_v1.method.wrap_method( + self.switch_to_custom_mode, + default_timeout=None, + 
client_info=client_info, + ), + self.update_peering: gapic_v1.method.wrap_method( + self.update_peering, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def add_peering(self) -> Callable[ + [compute.AddPeeringNetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteNetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetNetworkRequest], + Union[ + compute.Network, + Awaitable[compute.Network] + ]]: + raise NotImplementedError() + + @property + def get_effective_firewalls(self) -> Callable[ + [compute.GetEffectiveFirewallsNetworkRequest], + Union[ + compute.NetworksGetEffectiveFirewallsResponse, + Awaitable[compute.NetworksGetEffectiveFirewallsResponse] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertNetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListNetworksRequest], + Union[ + compute.NetworkList, + Awaitable[compute.NetworkList] + ]]: + raise NotImplementedError() + + @property + def list_peering_routes(self) -> Callable[ + [compute.ListPeeringRoutesNetworksRequest], + Union[ + compute.ExchangedPeeringRoutesList, + Awaitable[compute.ExchangedPeeringRoutesList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchNetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def 
remove_peering(self) -> Callable[ + [compute.RemovePeeringNetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def switch_to_custom_mode(self) -> Callable[ + [compute.SwitchToCustomModeNetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update_peering(self) -> Callable[ + [compute.UpdatePeeringNetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'NetworksTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/rest.py new file mode 100644 index 000000000..298e80c49 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/networks/transports/rest.py @@ -0,0 +1,1547 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import NetworksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class NetworksRestInterceptor: + """Interceptor for Networks. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the NetworksRestTransport. + + .. 
code-block:: python + class MyCustomNetworksInterceptor(NetworksRestInterceptor): + def pre_add_peering(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_peering(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_effective_firewalls(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_effective_firewalls(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_peering_routes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_peering_routes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_peering(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_remove_peering(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_switch_to_custom_mode(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_switch_to_custom_mode(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_peering(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_peering(self, response): + logging.log(f"Received response: {response}") + return response + + transport = NetworksRestTransport(interceptor=MyCustomNetworksInterceptor()) + client = NetworksClient(transport=transport) + + + """ + def pre_add_peering(self, request: compute.AddPeeringNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddPeeringNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_peering + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_add_peering(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_peering + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. 
+ """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_get(self, response: compute.Network) -> compute.Network: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + def pre_get_effective_firewalls(self, request: compute.GetEffectiveFirewallsNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetEffectiveFirewallsNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_effective_firewalls + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_get_effective_firewalls(self, response: compute.NetworksGetEffectiveFirewallsResponse) -> compute.NetworksGetEffectiveFirewallsResponse: + """Post-rpc interceptor for get_effective_firewalls + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListNetworksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_list(self, response: compute.NetworkList) -> compute.NetworkList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + def pre_list_peering_routes(self, request: compute.ListPeeringRoutesNetworksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListPeeringRoutesNetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_peering_routes + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_list_peering_routes(self, response: compute.ExchangedPeeringRoutesList) -> compute.ExchangedPeeringRoutesList: + """Post-rpc interceptor for list_peering_routes + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + def pre_remove_peering(self, request: compute.RemovePeeringNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemovePeeringNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_peering + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_remove_peering(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_peering + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + def pre_switch_to_custom_mode(self, request: compute.SwitchToCustomModeNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SwitchToCustomModeNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for switch_to_custom_mode + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. 
+ """ + return request, metadata + + def post_switch_to_custom_mode(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for switch_to_custom_mode + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + def pre_update_peering(self, request: compute.UpdatePeeringNetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdatePeeringNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_peering + + Override in a subclass to manipulate the request or metadata + before they are sent to the Networks server. + """ + return request, metadata + + def post_update_peering(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_peering + + Override in a subclass to manipulate the response + after it is returned by the Networks server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class NetworksRestStub: + _session: AuthorizedSession + _host: str + _interceptor: NetworksRestInterceptor + + +class NetworksRestTransport(NetworksTransport): + """REST backend transport for Networks. + + The Networks API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[NetworksRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or NetworksRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddPeering(NetworksRestStub): + def __hash__(self): + return hash("AddPeering") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddPeeringNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add peering method over HTTP. + + Args: + request (~.compute.AddPeeringNetworkRequest): + The request object. A request message for + Networks.AddPeering. See the method + description for details.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}/addPeering', + 'body': 'networks_add_peering_request_resource', + }, + ] + request, metadata = self._interceptor.pre_add_peering(request, metadata) + pb_request = compute.AddPeeringNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, 
uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_peering(resp) + return resp + + class _Delete(NetworksRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteNetworkRequest): + The request object. A request message for + Networks.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. 
- For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(NetworksRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Network: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetNetworkRequest): + The request object. A request message for Networks.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Network: + Represents a VPC Network resource. + Networks connect resources to each other + and to the internet. For more + information, read Virtual Private Cloud + (VPC) Network. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Network() + pb_resp = compute.Network.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetEffectiveFirewalls(NetworksRestStub): + def __hash__(self): + return hash("GetEffectiveFirewalls") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetEffectiveFirewallsNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworksGetEffectiveFirewallsResponse: + r"""Call the get effective firewalls method over HTTP. + + Args: + request (~.compute.GetEffectiveFirewallsNetworkRequest): + The request object. A request message for + Networks.GetEffectiveFirewalls. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NetworksGetEffectiveFirewallsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}/getEffectiveFirewalls', + }, + ] + request, metadata = self._interceptor.pre_get_effective_firewalls(request, metadata) + pb_request = compute.GetEffectiveFirewallsNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworksGetEffectiveFirewallsResponse() + pb_resp = compute.NetworksGetEffectiveFirewallsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_effective_firewalls(resp) + return resp + + class _Insert(NetworksRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertNetworkRequest): + The request object. A request message for + Networks.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/networks', + 'body': 'network_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(NetworksRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNetworksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListNetworksRequest): + The request object. A request message for Networks.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NetworkList: + Contains a list of networks. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/networks', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListNetworksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkList() + pb_resp = compute.NetworkList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListPeeringRoutes(NetworksRestStub): + def __hash__(self): + return hash("ListPeeringRoutes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListPeeringRoutesNetworksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ExchangedPeeringRoutesList: + r"""Call the list peering routes method over HTTP. + + Args: + request (~.compute.ListPeeringRoutesNetworksRequest): + The request object. A request message for + Networks.ListPeeringRoutes. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.ExchangedPeeringRoutesList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}/listPeeringRoutes', + }, + ] + request, metadata = self._interceptor.pre_list_peering_routes(request, metadata) + pb_request = compute.ListPeeringRoutesNetworksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ExchangedPeeringRoutesList() + pb_resp = compute.ExchangedPeeringRoutesList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_peering_routes(resp) + return resp + + class _Patch(NetworksRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchNetworkRequest): + The request object. A request message for Networks.Patch. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}', + 'body': 'network_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _RemovePeering(NetworksRestStub): + def __hash__(self): + return hash("RemovePeering") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemovePeeringNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove peering method over HTTP. + + Args: + request (~.compute.RemovePeeringNetworkRequest): + The request object. A request message for + Networks.RemovePeering. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}/removePeering', + 'body': 'networks_remove_peering_request_resource', + }, + ] + request, metadata = self._interceptor.pre_remove_peering(request, metadata) + pb_request = compute.RemovePeeringNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_peering(resp) + return resp + + class _SwitchToCustomMode(NetworksRestStub): + def __hash__(self): + return hash("SwitchToCustomMode") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SwitchToCustomModeNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the switch to custom mode method over HTTP. + + Args: + request (~.compute.SwitchToCustomModeNetworkRequest): + The request object. A request message for + Networks.SwitchToCustomMode. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}/switchToCustomMode', + }, + ] + request, metadata = self._interceptor.pre_switch_to_custom_mode(request, metadata) + pb_request = compute.SwitchToCustomModeNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_switch_to_custom_mode(resp) + return resp + + class _UpdatePeering(NetworksRestStub): + def __hash__(self): + return hash("UpdatePeering") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdatePeeringNetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update peering method over HTTP. + + Args: + request (~.compute.UpdatePeeringNetworkRequest): + The request object. A request message for + Networks.UpdatePeering. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/networks/{network}/updatePeering', + 'body': 'networks_update_peering_request_resource', + }, + ] + request, metadata = self._interceptor.pre_update_peering(request, metadata) + pb_request = compute.UpdatePeeringNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_peering(resp) + return resp + + @property + def add_peering(self) -> Callable[ + [compute.AddPeeringNetworkRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AddPeering(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteNetworkRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetNetworkRequest], + compute.Network]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_effective_firewalls(self) -> Callable[ + [compute.GetEffectiveFirewallsNetworkRequest], + compute.NetworksGetEffectiveFirewallsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEffectiveFirewalls(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertNetworkRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListNetworksRequest], + compute.NetworkList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_peering_routes(self) -> Callable[ + [compute.ListPeeringRoutesNetworksRequest], + compute.ExchangedPeeringRoutesList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPeeringRoutes(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchNetworkRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_peering(self) -> Callable[ + [compute.RemovePeeringNetworkRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemovePeering(self._session, self._host, self._interceptor) # type: ignore + + @property + def switch_to_custom_mode(self) -> Callable[ + [compute.SwitchToCustomModeNetworkRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SwitchToCustomMode(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_peering(self) -> Callable[ + [compute.UpdatePeeringNetworkRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdatePeering(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'NetworksRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/__init__.py new file mode 100644 index 000000000..5e097eeca --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import NodeGroupsClient + +__all__ = ( + 'NodeGroupsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/client.py new file mode 100644 index 000000000..06b22e024 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/client.py @@ -0,0 +1,3373 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.node_groups import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import NodeGroupsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import NodeGroupsRestTransport + + +class NodeGroupsClientMeta(type): + """Metaclass for the NodeGroups client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[NodeGroupsTransport]] + _transport_registry["rest"] = NodeGroupsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[NodeGroupsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class NodeGroupsClient(metaclass=NodeGroupsClientMeta): + """The NodeGroups API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NodeGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NodeGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NodeGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + NodeGroupsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, NodeGroupsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the node groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, NodeGroupsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NodeGroupsTransport): + # transport is a NodeGroupsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def add_nodes_unary(self, + request: Optional[Union[compute.AddNodesNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_add_nodes_request_resource: Optional[compute.NodeGroupsAddNodesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Adds specified number of nodes to the node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_nodes(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AddNodesNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_nodes(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddNodesNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.AddNodes. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource. + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_add_nodes_request_resource (google.cloud.compute_v1.types.NodeGroupsAddNodesRequest): + The body resource for this request + This corresponds to the ``node_groups_add_nodes_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_groups_add_nodes_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddNodesNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddNodesNodeGroupRequest): + request = compute.AddNodesNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_add_nodes_request_resource is not None: + request.node_groups_add_nodes_request_resource = node_groups_add_nodes_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_nodes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_nodes(self, + request: Optional[Union[compute.AddNodesNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_add_nodes_request_resource: Optional[compute.NodeGroupsAddNodesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Adds specified number of nodes to the node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_nodes(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AddNodesNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_nodes(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddNodesNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.AddNodes. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ node_group (str): + Name of the NodeGroup resource. + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_add_nodes_request_resource (google.cloud.compute_v1.types.NodeGroupsAddNodesRequest): + The body resource for this request + This corresponds to the ``node_groups_add_nodes_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_groups_add_nodes_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddNodesNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddNodesNodeGroupRequest): + request = compute.AddNodesNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_add_nodes_request_resource is not None: + request.node_groups_add_nodes_request_resource = node_groups_add_nodes_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_nodes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListNodeGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of node groups. Note: + use nodeGroups.listNodes for more details about each + group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNodeGroupsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListNodeGroupsRequest, dict]): + The request object. A request message for + NodeGroups.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.services.node_groups.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListNodeGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListNodeGroupsRequest): + request = compute.AggregatedListNodeGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified NodeGroup resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource to + delete. 
+
+                This corresponds to the ``node_group`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw long-running operation
+                response; it is not polled for
+                completion (use :meth:`delete` for
+                automatic polling).
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, node_group])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.DeleteNodeGroupRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.DeleteNodeGroupRequest):
+            request = compute.DeleteNodeGroupRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if zone is not None:
+            request.zone = zone
+        if node_group is not None:
+            request.node_group = node_group
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+                ("node_group", request.node_group),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified NodeGroup resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ node_group (str): + Name of the NodeGroup resource to + delete. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNodeGroupRequest): + request = compute.DeleteNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_nodes_unary(self, + request: Optional[Union[compute.DeleteNodesNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_delete_nodes_request_resource: Optional[compute.NodeGroupsDeleteNodesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes specified nodes from the node group. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_nodes(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNodesNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete_nodes(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNodesNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.DeleteNodes. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource whose + nodes will be deleted. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_delete_nodes_request_resource (google.cloud.compute_v1.types.NodeGroupsDeleteNodesRequest): + The body resource for this request + This corresponds to the ``node_groups_delete_nodes_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw long-running operation
+                response; it is not polled for
+                completion (use :meth:`delete_nodes`
+                for automatic polling).
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, node_group, node_groups_delete_nodes_request_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.DeleteNodesNodeGroupRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.DeleteNodesNodeGroupRequest):
+            request = compute.DeleteNodesNodeGroupRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if zone is not None:
+            request.zone = zone
+        if node_group is not None:
+            request.node_group = node_group
+        if node_groups_delete_nodes_request_resource is not None:
+            request.node_groups_delete_nodes_request_resource = node_groups_delete_nodes_request_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_nodes]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_nodes(self, + request: Optional[Union[compute.DeleteNodesNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_delete_nodes_request_resource: Optional[compute.NodeGroupsDeleteNodesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes specified nodes from the node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_nodes(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNodesNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete_nodes(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteNodesNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.DeleteNodes. See the method + description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource whose + nodes will be deleted. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_delete_nodes_request_resource (google.cloud.compute_v1.types.NodeGroupsDeleteNodesRequest): + The body resource for this request + This corresponds to the ``node_groups_delete_nodes_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_groups_delete_nodes_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNodesNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.DeleteNodesNodeGroupRequest): + request = compute.DeleteNodesNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_delete_nodes_request_resource is not None: + request.node_groups_delete_nodes_request_resource = node_groups_delete_nodes_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_nodes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NodeGroup: + r"""Returns the specified NodeGroup. Get a list of + available NodeGroups by making a list() request. Note: + the "nodes" field should not be used. Use + nodeGroups.listNodes instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNodeGroupRequest, dict]): + The request object. A request message for NodeGroups.Get. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the node group to return. + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NodeGroup: + Represents a sole-tenant Node Group + resource. A sole-tenant node is a + physical server that is dedicated to + hosting VM instances only for your + specific project. Use sole-tenant nodes + to keep your instances physically + separated from instances in other + projects, or to group your instances + together on the same host hardware. For + more information, read Sole-tenant + nodes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetNodeGroupRequest): + request = compute.GetNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyNodeGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.GetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). 
A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyNodeGroupRequest): + request = compute.GetIamPolicyNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + initial_node_count: Optional[int] = None, + node_group_resource: Optional[compute.NodeGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a NodeGroup resource in the specified project + using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNodeGroupRequest( + initial_node_count=1911, + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + initial_node_count (int): + Initial count of nodes in the node + group. + + This corresponds to the ``initial_node_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group_resource (google.cloud.compute_v1.types.NodeGroup): + The body resource for this request + This corresponds to the ``node_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw long-running operation
+                response; it is not polled for
+                completion (use :meth:`insert` for
+                automatic polling).
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, zone, initial_node_count, node_group_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.InsertNodeGroupRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.InsertNodeGroupRequest):
+            request = compute.InsertNodeGroupRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if zone is not None:
+            request.zone = zone
+        if initial_node_count is not None:
+            request.initial_node_count = initial_node_count
+        if node_group_resource is not None:
+            request.node_group_resource = node_group_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.insert]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("zone", request.zone),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+ return response + + def insert(self, + request: Optional[Union[compute.InsertNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + initial_node_count: Optional[int] = None, + node_group_resource: Optional[compute.NodeGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a NodeGroup resource in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNodeGroupRequest( + initial_node_count=1911, + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ initial_node_count (int): + Initial count of nodes in the node + group. + + This corresponds to the ``initial_node_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group_resource (google.cloud.compute_v1.types.NodeGroup): + The body resource for this request + This corresponds to the ``node_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, initial_node_count, node_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNodeGroupRequest): + request = compute.InsertNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if initial_node_count is not None: + request.initial_node_count = initial_node_count + if node_group_resource is not None: + request.node_group_resource = node_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListNodeGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of node groups available to the + specified project. Note: use nodeGroups.listNodes for + more details about each group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNodeGroupsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNodeGroupsRequest, dict]): + The request object. A request message for + NodeGroups.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.node_groups.pagers.ListPager: + Contains a list of nodeGroups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNodeGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNodeGroupsRequest): + request = compute.ListNodeGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_nodes(self, + request: Optional[Union[compute.ListNodesNodeGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNodesPager: + r"""Lists nodes in the node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_nodes(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNodesNodeGroupsRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNodesNodeGroupsRequest, dict]): + The request object. A request message for + NodeGroups.ListNodes. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource whose + nodes you want to list. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.node_groups.pagers.ListNodesPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNodesNodeGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNodesNodeGroupsRequest): + request = compute.ListNodesNodeGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_nodes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNodesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_group_resource: Optional[compute.NodeGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.PatchNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource to + update. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group_resource (google.cloud.compute_v1.types.NodeGroup): + The body resource for this request + This corresponds to the ``node_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchNodeGroupRequest): + request = compute.PatchNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_group_resource is not None: + request.node_group_resource = node_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch(self, + request: Optional[Union[compute.PatchNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_group_resource: Optional[compute.NodeGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.PatchNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource to + update. 
+ + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group_resource (google.cloud.compute_v1.types.NodeGroup): + The body resource for this request + This corresponds to the ``node_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchNodeGroupRequest): + request = compute.PatchNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_group_resource is not None: + request.node_group_resource = node_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + zone_set_policy_request_resource: Optional[compute.ZoneSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyNodeGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.SetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + This corresponds to the ``zone_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, zone_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyNodeGroupRequest): + request = compute.SetIamPolicyNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if zone_set_policy_request_resource is not None: + request.zone_set_policy_request_resource = zone_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_node_template_unary(self, + request: Optional[Union[compute.SetNodeTemplateNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_set_node_template_request_resource: Optional[compute.NodeGroupsSetNodeTemplateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the node template of the node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_node_template(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetNodeTemplateNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_node_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetNodeTemplateNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.SetNodeTemplate. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource to + update. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_set_node_template_request_resource (google.cloud.compute_v1.types.NodeGroupsSetNodeTemplateRequest): + The body resource for this request + This corresponds to the ``node_groups_set_node_template_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_groups_set_node_template_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetNodeTemplateNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetNodeTemplateNodeGroupRequest): + request = compute.SetNodeTemplateNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_set_node_template_request_resource is not None: + request.node_groups_set_node_template_request_resource = node_groups_set_node_template_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_node_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_node_template(self, + request: Optional[Union[compute.SetNodeTemplateNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_set_node_template_request_resource: Optional[compute.NodeGroupsSetNodeTemplateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the node template of the node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_node_template(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetNodeTemplateNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_node_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetNodeTemplateNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.SetNodeTemplate. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource to + update. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_set_node_template_request_resource (google.cloud.compute_v1.types.NodeGroupsSetNodeTemplateRequest): + The body resource for this request + This corresponds to the ``node_groups_set_node_template_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_groups_set_node_template_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetNodeTemplateNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetNodeTemplateNodeGroupRequest): + request = compute.SetNodeTemplateNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_set_node_template_request_resource is not None: + request.node_groups_set_node_template_request_resource = node_groups_set_node_template_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_node_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def simulate_maintenance_event_unary(self, + request: Optional[Union[compute.SimulateMaintenanceEventNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_simulate_maintenance_event_request_resource: Optional[compute.NodeGroupsSimulateMaintenanceEventRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Simulates maintenance event on specified nodes from + the node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_simulate_maintenance_event(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SimulateMaintenanceEventNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.simulate_maintenance_event(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SimulateMaintenanceEventNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.SimulateMaintenanceEvent. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource whose + nodes will go under maintenance + simulation. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_simulate_maintenance_event_request_resource (google.cloud.compute_v1.types.NodeGroupsSimulateMaintenanceEventRequest): + The body resource for this request + This corresponds to the ``node_groups_simulate_maintenance_event_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_groups_simulate_maintenance_event_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SimulateMaintenanceEventNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SimulateMaintenanceEventNodeGroupRequest): + request = compute.SimulateMaintenanceEventNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_simulate_maintenance_event_request_resource is not None: + request.node_groups_simulate_maintenance_event_request_resource = node_groups_simulate_maintenance_event_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.simulate_maintenance_event] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def simulate_maintenance_event(self, + request: Optional[Union[compute.SimulateMaintenanceEventNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_simulate_maintenance_event_request_resource: Optional[compute.NodeGroupsSimulateMaintenanceEventRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Simulates maintenance event on specified nodes from + the node group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_simulate_maintenance_event(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SimulateMaintenanceEventNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.simulate_maintenance_event(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SimulateMaintenanceEventNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.SimulateMaintenanceEvent. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the NodeGroup resource whose + nodes will go under maintenance + simulation. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_simulate_maintenance_event_request_resource (google.cloud.compute_v1.types.NodeGroupsSimulateMaintenanceEventRequest): + The body resource for this request + This corresponds to the ``node_groups_simulate_maintenance_event_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_group, node_groups_simulate_maintenance_event_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SimulateMaintenanceEventNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SimulateMaintenanceEventNodeGroupRequest): + request = compute.SimulateMaintenanceEventNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_simulate_maintenance_event_request_resource is not None: + request.node_groups_simulate_maintenance_event_request_resource = node_groups_simulate_maintenance_event_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.simulate_maintenance_event] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsNodeGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNodeGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsNodeGroupRequest): + request = compute.TestIamPermissionsNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "NodeGroupsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "NodeGroupsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/pagers.py new file mode 100644 index 000000000..3e953168f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/pagers.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NodeGroupAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NodeGroupAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NodeGroupAggregatedList], + request: compute.AggregatedListNodeGroupsRequest, + response: compute.NodeGroupAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListNodeGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NodeGroupAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListNodeGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NodeGroupAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.NodeGroupsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.NodeGroupsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NodeGroupList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NodeGroupList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NodeGroupList], + request: compute.ListNodeGroupsRequest, + response: compute.NodeGroupList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListNodeGroupsRequest): + The initial request object. 
+ response (google.cloud.compute_v1.types.NodeGroupList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListNodeGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NodeGroupList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NodeGroup]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListNodesPager: + """A pager for iterating through ``list_nodes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NodeGroupsListNodes` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNodes`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NodeGroupsListNodes` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NodeGroupsListNodes], + request: compute.ListNodesNodeGroupsRequest, + response: compute.NodeGroupsListNodes, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListNodesNodeGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.NodeGroupsListNodes): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListNodesNodeGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NodeGroupsListNodes]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NodeGroupNode]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/__init__.py new file mode 100644 index 000000000..9c5b46b01 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import NodeGroupsTransport +from .rest import NodeGroupsRestTransport +from .rest import NodeGroupsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[NodeGroupsTransport]] +_transport_registry['rest'] = NodeGroupsRestTransport + +__all__ = ( + 'NodeGroupsTransport', + 'NodeGroupsRestTransport', + 'NodeGroupsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/base.py new file mode 100644 index 000000000..8b7f39b0c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/base.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import zone_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class NodeGroupsTransport(abc.ABC): + """Abstract transport class for NodeGroups.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_nodes: gapic_v1.method.wrap_method( + self.add_nodes, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.delete_nodes: gapic_v1.method.wrap_method( + self.delete_nodes, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_nodes: gapic_v1.method.wrap_method( + self.list_nodes, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_node_template: gapic_v1.method.wrap_method( + self.set_node_template, + default_timeout=None, + client_info=client_info, + ), + self.simulate_maintenance_event: gapic_v1.method.wrap_method( + self.simulate_maintenance_event, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated 
with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def add_nodes(self) -> Callable[ + [compute.AddNodesNodeGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNodeGroupsRequest], + Union[ + compute.NodeGroupAggregatedList, + Awaitable[compute.NodeGroupAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteNodeGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_nodes(self) -> Callable[ + [compute.DeleteNodesNodeGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetNodeGroupRequest], + Union[ + compute.NodeGroup, + Awaitable[compute.NodeGroup] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyNodeGroupRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertNodeGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListNodeGroupsRequest], + Union[ + compute.NodeGroupList, + Awaitable[compute.NodeGroupList] + ]]: + raise NotImplementedError() + + @property + def list_nodes(self) -> Callable[ + [compute.ListNodesNodeGroupsRequest], + Union[ + compute.NodeGroupsListNodes, + Awaitable[compute.NodeGroupsListNodes] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchNodeGroupRequest], + Union[ + 
compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyNodeGroupRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def set_node_template(self) -> Callable[ + [compute.SetNodeTemplateNodeGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def simulate_maintenance_event(self) -> Callable[ + [compute.SimulateMaintenanceEventNodeGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsNodeGroupRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient: + ex_op_service = self._extended_operations_services.get("zone_operations") + if not ex_op_service: + ex_op_service = zone_operations.ZoneOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["zone_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'NodeGroupsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/rest.py new file mode 100644 index 000000000..8a8e26ac2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_groups/transports/rest.py @@ -0,0 +1,1995 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import NodeGroupsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class NodeGroupsRestInterceptor: + """Interceptor for NodeGroups. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the NodeGroupsRestTransport. + + .. code-block:: python + class MyCustomNodeGroupsInterceptor(NodeGroupsRestInterceptor): + def pre_add_nodes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_nodes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_nodes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_nodes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_nodes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_nodes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_node_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_node_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_simulate_maintenance_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_simulate_maintenance_event(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = NodeGroupsRestTransport(interceptor=MyCustomNodeGroupsInterceptor()) + client = NodeGroupsClient(transport=transport) + + + """ + def pre_add_nodes(self, request: compute.AddNodesNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddNodesNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for 
add_nodes + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_add_nodes(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_nodes + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_aggregated_list(self, request: compute.AggregatedListNodeGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListNodeGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.NodeGroupAggregatedList) -> compute.NodeGroupAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. 
+ """ + return response + def pre_delete_nodes(self, request: compute.DeleteNodesNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteNodesNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_nodes + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_delete_nodes(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete_nodes + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_get(self, response: compute.NodeGroup) -> compute.NodeGroup: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListNodeGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNodeGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_list(self, response: compute.NodeGroupList) -> compute.NodeGroupList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_list_nodes(self, request: compute.ListNodesNodeGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNodesNodeGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_nodes + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_list_nodes(self, response: compute.NodeGroupsListNodes) -> compute.NodeGroupsListNodes: + """Post-rpc interceptor for list_nodes + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_set_node_template(self, request: compute.SetNodeTemplateNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetNodeTemplateNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_node_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_set_node_template(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_node_template + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. 
+ """ + return response + def pre_simulate_maintenance_event(self, request: compute.SimulateMaintenanceEventNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SimulateMaintenanceEventNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for simulate_maintenance_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_simulate_maintenance_event(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for simulate_maintenance_event + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class NodeGroupsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: NodeGroupsRestInterceptor + + +class NodeGroupsRestTransport(NodeGroupsTransport): + """REST backend transport for NodeGroups. + + The NodeGroups API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[NodeGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or NodeGroupsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AddNodes(NodeGroupsRestStub):
+        def __hash__(self):
+            return hash("AddNodes")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.AddNodesNodeGroupRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the add nodes method over HTTP.
+
+            Args:
+                request (~.compute.AddNodesNodeGroupRequest):
+                    The request object. A request message for
+                    NodeGroups.AddNodes. See the method
+                    description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. For more
+                    information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/addNodes', + 'body': 'node_groups_add_nodes_request_resource', + }, + ] + request, metadata = self._interceptor.pre_add_nodes(request, metadata) + pb_request = compute.AddNodesNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_nodes(resp) + return resp + + class _AggregatedList(NodeGroupsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListNodeGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeGroupAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListNodeGroupsRequest): + The request object. A request message for + NodeGroups.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NodeGroupAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/nodeGroups', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListNodeGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeGroupAggregatedList() + pb_resp = compute.NodeGroupAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(NodeGroupsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteNodeGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteNodeGroupRequest): + The request object. A request message for + NodeGroups.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _DeleteNodes(NodeGroupsRestStub): + def __hash__(self): + return hash("DeleteNodes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteNodesNodeGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete nodes method over HTTP. + + Args: + request (~.compute.DeleteNodesNodeGroupRequest): + The request object. A request message for + NodeGroups.DeleteNodes. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/deleteNodes', + 'body': 'node_groups_delete_nodes_request_resource', + }, + ] + request, metadata = self._interceptor.pre_delete_nodes(request, metadata) + pb_request = compute.DeleteNodesNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_nodes(resp) + return resp + + class _Get(NodeGroupsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetNodeGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeGroup: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetNodeGroupRequest): + The request object. A request message for NodeGroups.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NodeGroup: + Represents a sole-tenant Node Group + resource. A sole-tenant node is a + physical server that is dedicated to + hosting VM instances only for your + specific project. Use sole-tenant nodes + to keep your instances physically + separated from instances in other + projects, or to group your instances + together on the same host hardware. For + more information, read Sole-tenant + nodes. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeGroup() + pb_resp = compute.NodeGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(NodeGroupsRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyNodeGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyNodeGroupRequest): + The request object. A request message for + NodeGroups.GetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(NodeGroupsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "initialNodeCount" : 0, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertNodeGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertNodeGroupRequest): + The request object. A request message for + NodeGroups.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups', + 'body': 'node_group_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(NodeGroupsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNodeGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeGroupList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListNodeGroupsRequest): + The request object. A request message for + NodeGroups.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NodeGroupList: + Contains a list of nodeGroups. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListNodeGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeGroupList() + pb_resp = compute.NodeGroupList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListNodes(NodeGroupsRestStub): + def __hash__(self): + return hash("ListNodes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNodesNodeGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeGroupsListNodes: + r"""Call the list nodes method over HTTP. + + Args: + request (~.compute.ListNodesNodeGroupsRequest): + The request object. A request message for + NodeGroups.ListNodes. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NodeGroupsListNodes: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/listNodes', + }, + ] + request, metadata = self._interceptor.pre_list_nodes(request, metadata) + pb_request = compute.ListNodesNodeGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeGroupsListNodes() + pb_resp = compute.NodeGroupsListNodes.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_nodes(resp) + return resp + + class _Patch(NodeGroupsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchNodeGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchNodeGroupRequest): + The request object. A request message for + NodeGroups.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}', + 'body': 'node_group_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetIamPolicy(NodeGroupsRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyNodeGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyNodeGroupRequest): + The request object. A request message for + NodeGroups.SetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/setIamPolicy', + 'body': 'zone_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = compute.Policy()
+            pb_resp = compute.Policy.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_set_iam_policy(resp)
+            return resp
+
+    class _SetNodeTemplate(NodeGroupsRestStub):
+        def __hash__(self):
+            return hash("SetNodeTemplate")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.SetNodeTemplateNodeGroupRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the set node template method over HTTP.
+
+            Args:
+                request (~.compute.SetNodeTemplateNodeGroupRequest):
+                    The request object. A request message for
+                    NodeGroups.SetNodeTemplate. See the
+                    method description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. For more
+                    information, read Global, Regional, and Zonal Resources.
+
+            """
+
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/setNodeTemplate',
+                'body': 'node_groups_set_node_template_request_resource',
+            },
+            ]
+            request, metadata = self._interceptor.pre_set_node_template(request, metadata)
+            pb_request = compute.SetNodeTemplateNodeGroupRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                including_default_value_fields=False,
+                use_integers_for_enums=False
+            )
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+
+            # Jsonify the query params
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                including_default_value_fields=False,
+                use_integers_for_enums=False,
+            ))
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            # Send the request
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = compute.Operation()
+            pb_resp = compute.Operation.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_set_node_template(resp)
+            return resp
+
+    class _SimulateMaintenanceEvent(NodeGroupsRestStub):
+        def __hash__(self):
+            return hash("SimulateMaintenanceEvent")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.SimulateMaintenanceEventNodeGroupRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the simulate maintenance
+        event method over HTTP.
+
+            Args:
+                request (~.compute.SimulateMaintenanceEventNodeGroupRequest):
+                    The request object. A request message for
+                    NodeGroups.SimulateMaintenanceEvent. See
+                    the method description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. For more
+                    information, read Global, Regional, and Zonal Resources.
+
+            """
+
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/simulateMaintenanceEvent',
+                'body': 'node_groups_simulate_maintenance_event_request_resource',
+            },
+            ]
+            request, metadata = self._interceptor.pre_simulate_maintenance_event(request, metadata)
+            pb_request = compute.SimulateMaintenanceEventNodeGroupRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                including_default_value_fields=False,
+                use_integers_for_enums=False
+            )
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+
+            # Jsonify the query params
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                including_default_value_fields=False,
+                use_integers_for_enums=False,
+            ))
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            # Send the request
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_simulate_maintenance_event(resp) + return resp + + class _TestIamPermissions(NodeGroupsRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsNodeGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsNodeGroupRequest): + The request object. A request message for + NodeGroups.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def add_nodes(self) -> Callable[ + [compute.AddNodesNodeGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddNodes(self._session, self._host, self._interceptor) # type: ignore + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNodeGroupsRequest], + compute.NodeGroupAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteNodeGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_nodes(self) -> Callable[ + [compute.DeleteNodesNodeGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteNodes(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetNodeGroupRequest], + compute.NodeGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyNodeGroupRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertNodeGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListNodeGroupsRequest], + compute.NodeGroupList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_nodes(self) -> Callable[ + [compute.ListNodesNodeGroupsRequest], + compute.NodeGroupsListNodes]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListNodes(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchNodeGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyNodeGroupRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_node_template(self) -> Callable[ + [compute.SetNodeTemplateNodeGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetNodeTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def simulate_maintenance_event(self) -> Callable[ + [compute.SimulateMaintenanceEventNodeGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SimulateMaintenanceEvent(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsNodeGroupRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'NodeGroupsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/__init__.py new file mode 100644 index 000000000..8e18cf57d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import NodeTemplatesClient + +__all__ = ( + 'NodeTemplatesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/client.py new file mode 100644 index 000000000..1e277d3e4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/client.py @@ -0,0 +1,1760 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.node_templates import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import NodeTemplatesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import NodeTemplatesRestTransport + + +class NodeTemplatesClientMeta(type): + """Metaclass for the NodeTemplates client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[NodeTemplatesTransport]]
+    _transport_registry["rest"] = NodeTemplatesRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[NodeTemplatesTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class NodeTemplatesClient(metaclass=NodeTemplatesClientMeta):
+    """The NodeTemplates API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            NodeTemplatesClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            NodeTemplatesClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> NodeTemplatesTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            NodeTemplatesTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, NodeTemplatesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the node templates client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, NodeTemplatesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NodeTemplatesTransport): + # transport is a NodeTemplatesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListNodeTemplatesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of node templates. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNodeTemplatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListNodeTemplatesRequest, dict]): + The request object. A request message for + NodeTemplates.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.node_templates.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListNodeTemplatesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListNodeTemplatesRequest): + request = compute.AggregatedListNodeTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteNodeTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + node_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified NodeTemplate resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_delete():
+ # Create a client
+ client = compute_v1.NodeTemplatesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.DeleteNodeTemplateRequest(
+ node_template="node_template_value",
+ project="project_value",
+ region="region_value",
+ )
+
+ # Make the request
+ response = client.delete(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.DeleteNodeTemplateRequest, dict]):
+ The request object. A request message for
+ NodeTemplates.Delete. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region (str):
+ The name of the region for this
+ request.
+
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ node_template (str):
+ Name of the NodeTemplate resource to
+ delete.
+
+ This corresponds to the ``node_template`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, node_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNodeTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNodeTemplateRequest): + request = compute.DeleteNodeTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if node_template is not None: + request.node_template = node_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("node_template", request.node_template), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteNodeTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + node_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified NodeTemplate resource. + + .. 
code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_delete():
+ # Create a client
+ client = compute_v1.NodeTemplatesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.DeleteNodeTemplateRequest(
+ node_template="node_template_value",
+ project="project_value",
+ region="region_value",
+ )
+
+ # Make the request
+ response = client.delete(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.DeleteNodeTemplateRequest, dict]):
+ The request object. A request message for
+ NodeTemplates.Delete. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region (str):
+ The name of the region for this
+ request.
+
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ node_template (str):
+ Name of the NodeTemplate resource to
+ delete.
+
+ This corresponds to the ``node_template`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, node_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteNodeTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteNodeTemplateRequest): + request = compute.DeleteNodeTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if node_template is not None: + request.node_template = node_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("node_template", request.node_template), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetNodeTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + node_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NodeTemplate: + r"""Returns the specified node template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetNodeTemplateRequest( + node_template="node_template_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNodeTemplateRequest, dict]): + The request object. A request message for + NodeTemplates.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_template (str): + Name of the node template to return. + This corresponds to the ``node_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NodeTemplate: + Represent a sole-tenant Node Template + resource. You can use a template to + define properties for nodes in a node + group. For more information, read + Creating node groups and instances. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, node_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNodeTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetNodeTemplateRequest): + request = compute.GetNodeTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if node_template is not None: + request.node_template = node_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("node_template", request.node_template), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyNodeTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyNodeTemplateRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyNodeTemplateRequest, dict]): + The request object. A request message for + NodeTemplates.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyNodeTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicyNodeTemplateRequest): + request = compute.GetIamPolicyNodeTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertNodeTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + node_template_resource: Optional[compute.NodeTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a NodeTemplate resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_insert():
+ # Create a client
+ client = compute_v1.NodeTemplatesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.InsertNodeTemplateRequest(
+ project="project_value",
+ region="region_value",
+ )
+
+ # Make the request
+ response = client.insert(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.InsertNodeTemplateRequest, dict]):
+ The request object. A request message for
+ NodeTemplates.Insert. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region (str):
+ The name of the region for this
+ request.
+
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ node_template_resource (google.cloud.compute_v1.types.NodeTemplate):
+ The body resource for this request
+ This corresponds to the ``node_template_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, node_template_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertNodeTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertNodeTemplateRequest): + request = compute.InsertNodeTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if node_template_resource is not None: + request.node_template_resource = node_template_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertNodeTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + node_template_resource: Optional[compute.NodeTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a NodeTemplate resource in the specified + project using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertNodeTemplateRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertNodeTemplateRequest, dict]): + The request object. A request message for + NodeTemplates.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_template_resource (google.cloud.compute_v1.types.NodeTemplate): + The body resource for this request + This corresponds to the ``node_template_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, node_template_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.InsertNodeTemplateRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.InsertNodeTemplateRequest):
+ request = compute.InsertNodeTemplateRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if region is not None:
+ request.region = region
+ if node_template_resource is not None:
+ request.node_template_resource = node_template_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.insert]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("region", request.region),
+ )),
+ )
+
+ # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListNodeTemplatesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of node templates available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.ListNodeTemplatesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNodeTemplatesRequest, dict]): + The request object. A request message for + NodeTemplates.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.node_templates.pagers.ListPager: + Contains a list of node templates. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNodeTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNodeTemplatesRequest): + request = compute.ListNodeTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyNodeTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[compute.RegionSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyNodeTemplateRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyNodeTemplateRequest, dict]): + The request object. A request message for + NodeTemplates.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyNodeTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyNodeTemplateRequest): + request = compute.SetIamPolicyNodeTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = region_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsNodeTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNodeTemplateRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsNodeTemplateRequest, dict]): + The request object. A request message for + NodeTemplates.TestIamPermissions. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsNodeTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsNodeTemplateRequest): + request = compute.TestIamPermissionsNodeTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "NodeTemplatesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "NodeTemplatesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/pagers.py new file mode 100644 index 000000000..fce70bbdb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NodeTemplateAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NodeTemplateAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NodeTemplateAggregatedList], + request: compute.AggregatedListNodeTemplatesRequest, + response: compute.NodeTemplateAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListNodeTemplatesRequest): + The initial request object. + response (google.cloud.compute_v1.types.NodeTemplateAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListNodeTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NodeTemplateAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.NodeTemplatesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.NodeTemplatesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NodeTemplateList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NodeTemplateList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NodeTemplateList], + request: compute.ListNodeTemplatesRequest, + response: compute.NodeTemplateList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListNodeTemplatesRequest): + The initial request object. + response (google.cloud.compute_v1.types.NodeTemplateList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListNodeTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NodeTemplateList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NodeTemplate]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/__init__.py new file mode 100644 index 000000000..90ad0434a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import NodeTemplatesTransport +from .rest import NodeTemplatesRestTransport +from .rest import NodeTemplatesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[NodeTemplatesTransport]] +_transport_registry['rest'] = NodeTemplatesRestTransport + +__all__ = ( + 'NodeTemplatesTransport', + 'NodeTemplatesRestTransport', + 'NodeTemplatesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/base.py new file mode 100644 index 000000000..cfe0bd086 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/base.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class NodeTemplatesTransport(abc.ABC): + """Abstract transport class for NodeTemplates.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNodeTemplatesRequest], + Union[ + compute.NodeTemplateAggregatedList, + Awaitable[compute.NodeTemplateAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteNodeTemplateRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetNodeTemplateRequest], + Union[ + compute.NodeTemplate, + Awaitable[compute.NodeTemplate] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyNodeTemplateRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertNodeTemplateRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListNodeTemplatesRequest], + Union[ + compute.NodeTemplateList, + Awaitable[compute.NodeTemplateList] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyNodeTemplateRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsNodeTemplateRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + 
transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'NodeTemplatesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/rest.py new file mode 100644 index 000000000..535b56a19 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_templates/transports/rest.py @@ -0,0 +1,1209 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import NodeTemplatesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class NodeTemplatesRestInterceptor: + """Interceptor for NodeTemplates. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the NodeTemplatesRestTransport. + + .. 
code-block:: python + class MyCustomNodeTemplatesInterceptor(NodeTemplatesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
NodeTemplatesRestTransport(interceptor=MyCustomNodeTemplatesInterceptor()) + client = NodeTemplatesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListNodeTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListNodeTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTemplates server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.NodeTemplateAggregatedList) -> compute.NodeTemplateAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the NodeTemplates server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteNodeTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteNodeTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTemplates server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the NodeTemplates server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetNodeTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNodeTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTemplates server. 
+ """ + return request, metadata + + def post_get(self, response: compute.NodeTemplate) -> compute.NodeTemplate: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the NodeTemplates server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyNodeTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyNodeTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTemplates server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NodeTemplates server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertNodeTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertNodeTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTemplates server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the NodeTemplates server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListNodeTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNodeTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTemplates server. 
+ """ + return request, metadata + + def post_list(self, response: compute.NodeTemplateList) -> compute.NodeTemplateList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the NodeTemplates server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyNodeTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyNodeTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTemplates server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NodeTemplates server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsNodeTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsNodeTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTemplates server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the NodeTemplates server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class NodeTemplatesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: NodeTemplatesRestInterceptor + + +class NodeTemplatesRestTransport(NodeTemplatesTransport): + """REST backend transport for NodeTemplates. + + The NodeTemplates API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[NodeTemplatesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or NodeTemplatesRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AggregatedList(NodeTemplatesRestStub):
+        def __hash__(self):
+            return hash("AggregatedList")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.AggregatedListNodeTemplatesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.NodeTemplateAggregatedList:
+            r"""Call the aggregated list method over HTTP.
+
+            Args:
+                request (~.compute.AggregatedListNodeTemplatesRequest):
+                    The request object. A request message for
+                    NodeTemplates.AggregatedList. See the
+                    method description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+ + Returns: + ~.compute.NodeTemplateAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/nodeTemplates', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListNodeTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeTemplateAggregatedList() + pb_resp = compute.NodeTemplateAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(NodeTemplatesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteNodeTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteNodeTemplateRequest): + The request object. A request message for + NodeTemplates.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteNodeTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(NodeTemplatesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetNodeTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeTemplate: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetNodeTemplateRequest): + The request object. A request message for + NodeTemplates.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NodeTemplate: + Represent a sole-tenant Node Template + resource. You can use a template to + define properties for nodes in a node + group. For more information, read + Creating node groups and instances. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetNodeTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeTemplate() + pb_resp = compute.NodeTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(NodeTemplatesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyNodeTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyNodeTemplateRequest): + The request object. A request message for + NodeTemplates.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google
+                Cloud resources, a ``binding`` can also specify a
+                ``condition``, which is a logical expression that allows
+                access to a resource only if the expression evaluates to
+                ``true``. A condition can add constraints based on
+                attributes of the request, the resource, or both. To
+                learn which resources support conditions in their IAM
+                policies, see the `IAM
+                documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+                **JSON example:** { "bindings": [ { "role":
+                "roles/resourcemanager.organizationAdmin", "members": [
+                "user:mike@example.com", "group:admins@example.com",
+                "domain:google.com",
+                "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                ] }, { "role":
+                "roles/resourcemanager.organizationViewer", "members": [
+                "user:eve@example.com" ], "condition": { "title":
+                "expirable access", "description": "Does not grant
+                access after Sep 2020", "expression": "request.time <
+                timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag":
+                "BwWWja0YfJA=", "version": 3 } **YAML example:**
+                bindings: - members: - user:mike@example.com -
+                group:admins@example.com - domain:google.com -
+                serviceAccount:my-project-id@appspot.gserviceaccount.com
+                role: roles/resourcemanager.organizationAdmin - members:
+                - user:eve@example.com role:
+                roles/resourcemanager.organizationViewer condition:
+                title: expirable access description: Does not grant
+                access after Sep 2020 expression: request.time <
+                timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA=
+                version: 3 For a description of IAM and its features,
+                see the `IAM
+                documentation <https://cloud.google.com/iam/docs/>`__.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyNodeTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(NodeTemplatesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertNodeTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertNodeTemplateRequest): + The request object. A request message for + NodeTemplates.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/nodeTemplates', + 'body': 'node_template_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertNodeTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(NodeTemplatesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNodeTemplatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeTemplateList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListNodeTemplatesRequest): + The request object. A request message for + NodeTemplates.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NodeTemplateList: + Contains a list of node templates. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/nodeTemplates', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListNodeTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeTemplateList() + pb_resp = compute.NodeTemplateList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetIamPolicy(NodeTemplatesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyNodeTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyNodeTemplateRequest): + The request object. A request message for + NodeTemplates.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google
+                Cloud resources, a ``binding`` can also specify a
+                ``condition``, which is a logical expression that allows
+                access to a resource only if the expression evaluates to
+                ``true``. A condition can add constraints based on
+                attributes of the request, the resource, or both. To
+                learn which resources support conditions in their IAM
+                policies, see the `IAM
+                documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+                **JSON example:** { "bindings": [ { "role":
+                "roles/resourcemanager.organizationAdmin", "members": [
+                "user:mike@example.com", "group:admins@example.com",
+                "domain:google.com",
+                "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                ] }, { "role":
+                "roles/resourcemanager.organizationViewer", "members": [
+                "user:eve@example.com" ], "condition": { "title":
+                "expirable access", "description": "Does not grant
+                access after Sep 2020", "expression": "request.time <
+                timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag":
+                "BwWWja0YfJA=", "version": 3 } **YAML example:**
+                bindings: - members: - user:mike@example.com -
+                group:admins@example.com - domain:google.com -
+                serviceAccount:my-project-id@appspot.gserviceaccount.com
+                role: roles/resourcemanager.organizationAdmin - members:
+                - user:eve@example.com role:
+                roles/resourcemanager.organizationViewer condition:
+                title: expirable access description: Does not grant
+                access after Sep 2020 expression: request.time <
+                timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA=
+                version: 3 For a description of IAM and its features,
+                see the `IAM
+                documentation <https://cloud.google.com/iam/docs/>`__.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/setIamPolicy', + 'body': 'region_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyNodeTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(NodeTemplatesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsNodeTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsNodeTemplateRequest): + The request object. A request message for + NodeTemplates.TestIamPermissions. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsNodeTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNodeTemplatesRequest], + compute.NodeTemplateAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteNodeTemplateRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetNodeTemplateRequest], + compute.NodeTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyNodeTemplateRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertNodeTemplateRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListNodeTemplatesRequest], + compute.NodeTemplateList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyNodeTemplateRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsNodeTemplateRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'NodeTemplatesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/__init__.py new file mode 100644 index 000000000..c55ba5797 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import NodeTypesClient + +__all__ = ( + 'NodeTypesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/client.py new file mode 100644 index 000000000..279dab7ec --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/client.py @@ -0,0 +1,755 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.node_types import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import NodeTypesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import NodeTypesRestTransport + + +class NodeTypesClientMeta(type): + """Metaclass for the NodeTypes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[NodeTypesTransport]] + _transport_registry["rest"] = NodeTypesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[NodeTypesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class NodeTypesClient(metaclass=NodeTypesClientMeta):
+ """The NodeTypes API."""
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "compute.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ NodeTypesClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NodeTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NodeTypesTransport: + """Returns the transport used by the client instance. + + Returns: + NodeTypesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + 
return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, NodeTypesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the node types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, NodeTypesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NodeTypesTransport): + # transport is a NodeTypesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListNodeTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of node types. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.NodeTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNodeTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListNodeTypesRequest, dict]): + The request object. A request message for + NodeTypes.AggregatedList. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.node_types.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListNodeTypesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListNodeTypesRequest): + request = compute.AggregatedListNodeTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetNodeTypeRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NodeType: + r"""Returns the specified node type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.NodeTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetNodeTypeRequest( + node_type="node_type_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNodeTypeRequest, dict]): + The request object. A request message for NodeTypes.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_type (str): + Name of the node type to return. + This corresponds to the ``node_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NodeType: + Represent a sole-tenant Node Type + resource. Each node within a node group + must have a node type. A node type + specifies the total amount of cores and + memory for that node. Currently, the + only available node type is + n1-node-96-624 node type that has 96 + vCPUs and 624 GB of memory, available in + multiple zones. For more information + read Node types. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, node_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNodeTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetNodeTypeRequest): + request = compute.GetNodeTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_type is not None: + request.node_type = node_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("node_type", request.node_type), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListNodeTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of node types available to the + specified project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.NodeTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListNodeTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListNodeTypesRequest, dict]): + The request object. A request message for NodeTypes.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.node_types.pagers.ListPager: + Contains a list of node types. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListNodeTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListNodeTypesRequest): + request = compute.ListNodeTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "NodeTypesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "NodeTypesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/pagers.py new file mode 100644 index 000000000..27891fb64 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NodeTypeAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NodeTypeAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.NodeTypeAggregatedList], + request: compute.AggregatedListNodeTypesRequest, + response: compute.NodeTypeAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListNodeTypesRequest): + The initial request object. + response (google.cloud.compute_v1.types.NodeTypeAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListNodeTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NodeTypeAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.NodeTypesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.NodeTypesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NodeTypeList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.compute_v1.types.NodeTypeList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.NodeTypeList], + request: compute.ListNodeTypesRequest, + response: compute.NodeTypeList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListNodeTypesRequest): + The initial request object. + response (google.cloud.compute_v1.types.NodeTypeList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListNodeTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NodeTypeList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.NodeType]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/__init__.py new file mode 100644 index 000000000..a97ed734b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import NodeTypesTransport +from .rest import NodeTypesRestTransport +from .rest import NodeTypesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[NodeTypesTransport]] +_transport_registry['rest'] = NodeTypesRestTransport + +__all__ = ( + 'NodeTypesTransport', + 'NodeTypesRestTransport', + 'NodeTypesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/base.py new file mode 100644 index 000000000..2b6837210 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/base.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class NodeTypesTransport(abc.ABC): + """Abstract transport class for NodeTypes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNodeTypesRequest], + Union[ + compute.NodeTypeAggregatedList, + Awaitable[compute.NodeTypeAggregatedList] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetNodeTypeRequest], + Union[ + compute.NodeType, + Awaitable[compute.NodeType] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListNodeTypesRequest], + Union[ + compute.NodeTypeList, + Awaitable[compute.NodeTypeList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'NodeTypesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/rest.py new file mode 100644 index 000000000..9bd534efd --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/node_types/transports/rest.py @@ -0,0 +1,523 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import NodeTypesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class NodeTypesRestInterceptor: + """Interceptor for NodeTypes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the NodeTypesRestTransport. + + .. code-block:: python + class MyCustomNodeTypesInterceptor(NodeTypesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = NodeTypesRestTransport(interceptor=MyCustomNodeTypesInterceptor()) + client = NodeTypesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListNodeTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListNodeTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTypes server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.NodeTypeAggregatedList) -> compute.NodeTypeAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the NodeTypes server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetNodeTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNodeTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTypes server. + """ + return request, metadata + + def post_get(self, response: compute.NodeType) -> compute.NodeType: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the NodeTypes server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListNodeTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListNodeTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeTypes server. + """ + return request, metadata + + def post_list(self, response: compute.NodeTypeList) -> compute.NodeTypeList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the NodeTypes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class NodeTypesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: NodeTypesRestInterceptor + + +class NodeTypesRestTransport(NodeTypesTransport): + """REST backend transport for NodeTypes. + + The NodeTypes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[NodeTypesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or NodeTypesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(NodeTypesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListNodeTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeTypeAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListNodeTypesRequest): + The request object. A request message for + NodeTypes.AggregatedList. 
See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NodeTypeAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/nodeTypes', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListNodeTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeTypeAggregatedList() + pb_resp = compute.NodeTypeAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Get(NodeTypesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetNodeTypeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeType: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetNodeTypeRequest): + The request object. A request message for NodeTypes.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NodeType: + Represent a sole-tenant Node Type + resource. Each node within a node group + must have a node type. A node type + specifies the total amount of cores and + memory for that node. Currently, the + only available node type is + n1-node-96-624 node type that has 96 + vCPUs and 624 GB of memory, available in + multiple zones. For more information + read Node types. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeTypes/{node_type}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetNodeTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeType() + pb_resp = compute.NodeType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(NodeTypesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListNodeTypesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NodeTypeList: + r"""Call the list method over HTTP. 
+ + Args: + request (~.compute.ListNodeTypesRequest): + The request object. A request message for NodeTypes.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NodeTypeList: + Contains a list of node types. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/nodeTypes', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListNodeTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NodeTypeList() + pb_resp = compute.NodeTypeList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListNodeTypesRequest], + compute.NodeTypeAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetNodeTypeRequest], + compute.NodeType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListNodeTypesRequest], + compute.NodeTypeList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'NodeTypesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/__init__.py new file mode 100644 index 000000000..09537df48 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import PacketMirroringsClient + +__all__ = ( + 'PacketMirroringsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/client.py new file mode 100644 index 000000000..2e9917c62 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/client.py @@ -0,0 +1,1716 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.packet_mirrorings import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import PacketMirroringsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import PacketMirroringsRestTransport + + +class PacketMirroringsClientMeta(type): + """Metaclass for the PacketMirrorings client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[PacketMirroringsTransport]] + _transport_registry["rest"] = PacketMirroringsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[PacketMirroringsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PacketMirroringsClient(metaclass=PacketMirroringsClientMeta): + """The PacketMirrorings API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PacketMirroringsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PacketMirroringsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PacketMirroringsTransport: + """Returns the transport used by the client instance. + + Returns: + PacketMirroringsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, PacketMirroringsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the packet mirrorings client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PacketMirroringsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PacketMirroringsTransport): + # transport is a PacketMirroringsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListPacketMirroringsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of packetMirrorings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListPacketMirroringsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListPacketMirroringsRequest, dict]): + The request object. A request message for + PacketMirrorings.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.packet_mirrorings.pagers.AggregatedListPager: + Contains a list of packetMirrorings. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListPacketMirroringsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListPacketMirroringsRequest): + request = compute.AggregatedListPacketMirroringsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeletePacketMirroringRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + packet_mirroring: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified PacketMirroring resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.DeletePacketMirroringRequest( + packet_mirroring="packet_mirroring_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeletePacketMirroringRequest, dict]): + The request object. A request message for + PacketMirrorings.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + packet_mirroring (str): + Name of the PacketMirroring resource + to delete. + + This corresponds to the ``packet_mirroring`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, packet_mirroring]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeletePacketMirroringRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeletePacketMirroringRequest): + request = compute.DeletePacketMirroringRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if packet_mirroring is not None: + request.packet_mirroring = packet_mirroring + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("packet_mirroring", request.packet_mirroring), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeletePacketMirroringRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + packet_mirroring: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified PacketMirroring resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.DeletePacketMirroringRequest( + packet_mirroring="packet_mirroring_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeletePacketMirroringRequest, dict]): + The request object. A request message for + PacketMirrorings.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + packet_mirroring (str): + Name of the PacketMirroring resource + to delete. + + This corresponds to the ``packet_mirroring`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, packet_mirroring]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeletePacketMirroringRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeletePacketMirroringRequest): + request = compute.DeletePacketMirroringRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if packet_mirroring is not None: + request.packet_mirroring = packet_mirroring + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("packet_mirroring", request.packet_mirroring), + )), + ) + + # Send the request. 
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw Operation in an ExtendedOperation that polls the
        # region-scoped operations service for completion.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def get(self,
            request: Optional[Union[compute.GetPacketMirroringRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            packet_mirroring: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.PacketMirroring:
        r"""Returns the specified PacketMirroring resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.PacketMirroringsClient()

                # Initialize request argument(s)
                request = compute_v1.GetPacketMirroringRequest(
                    packet_mirroring="packet_mirroring_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetPacketMirroringRequest, dict]):
                The request object. A request message for
                PacketMirrorings.Get. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region for this request.
                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            packet_mirroring (str):
                Name of the PacketMirroring resource
                to return.

                This corresponds to the ``packet_mirroring`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.PacketMirroring:
                Represents a Packet Mirroring
                resource. Packet Mirroring clones the
                traffic of specified instances in your
                Virtual Private Cloud (VPC) network and
                forwards it to a collector destination,
                such as an instance group of an internal
                TCP/UDP load balancer, for analysis or
                examination.
                For more information about
                setting up Packet Mirroring, see Using
                Packet Mirroring.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, packet_mirroring])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetPacketMirroringRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetPacketMirroringRequest):
            request = compute.GetPacketMirroringRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if packet_mirroring is not None:
            request.packet_mirroring = packet_mirroring

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("packet_mirroring", request.packet_mirroring),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert_unary(self,
            request: Optional[Union[compute.InsertPacketMirroringRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            packet_mirroring_resource: Optional[compute.PacketMirroring] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates a PacketMirroring resource in the specified
        project and region using the data included in the
        request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.PacketMirroringsClient()

                # Initialize request argument(s)
                request = compute_v1.InsertPacketMirroringRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertPacketMirroringRequest, dict]):
                The request object. A request message for
                PacketMirrorings.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region for this request.
                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            packet_mirroring_resource (google.cloud.compute_v1.types.PacketMirroring):
                The body resource for this request
                This corresponds to the ``packet_mirroring_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, packet_mirroring_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertPacketMirroringRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertPacketMirroringRequest):
            request = compute.InsertPacketMirroringRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if packet_mirroring_resource is not None:
            request.packet_mirroring_resource = packet_mirroring_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert(self,
            request: Optional[Union[compute.InsertPacketMirroringRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            packet_mirroring_resource: Optional[compute.PacketMirroring] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Creates a PacketMirroring resource in the specified
        project and region using the data included in the
        request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.PacketMirroringsClient()

                # Initialize request argument(s)
                request = compute_v1.InsertPacketMirroringRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertPacketMirroringRequest, dict]):
                The request object. A request message for
                PacketMirrorings.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region for this request.
                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            packet_mirroring_resource (google.cloud.compute_v1.types.PacketMirroring):
                The body resource for this request
                This corresponds to the ``packet_mirroring_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, packet_mirroring_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertPacketMirroringRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertPacketMirroringRequest):
            request = compute.InsertPacketMirroringRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if packet_mirroring_resource is not None:
            request.packet_mirroring_resource = packet_mirroring_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw Operation in an ExtendedOperation that polls the
        # region-scoped operations service for completion.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def list(self,
            request: Optional[Union[compute.ListPacketMirroringsRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListPager:
        r"""Retrieves a list of PacketMirroring resources
        available to the specified project and region.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_list():
                # Create a client
                client = compute_v1.PacketMirroringsClient()

                # Initialize request argument(s)
                request = compute_v1.ListPacketMirroringsRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                page_result = client.list(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.ListPacketMirroringsRequest, dict]):
                The request object. A request message for
                PacketMirrorings.List. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region for this request.
                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.services.packet_mirrorings.pagers.ListPager:
                Contains a list of PacketMirroring
                resources.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.ListPacketMirroringsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.ListPacketMirroringsRequest):
            request = compute.ListPacketMirroringsRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def patch_unary(self,
            request: Optional[Union[compute.PatchPacketMirroringRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            packet_mirroring: Optional[str] = None,
            packet_mirroring_resource: Optional[compute.PacketMirroring] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Patches the specified PacketMirroring resource with
        the data included in the request. This method supports
        PATCH semantics and uses JSON merge patch format and
        processing rules.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.PacketMirroringsClient()

                # Initialize request argument(s)
                request = compute_v1.PatchPacketMirroringRequest(
                    packet_mirroring="packet_mirroring_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchPacketMirroringRequest, dict]):
                The request object. A request message for
                PacketMirrorings.Patch. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region for this request.
                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            packet_mirroring (str):
                Name of the PacketMirroring resource
                to patch.

                This corresponds to the ``packet_mirroring`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            packet_mirroring_resource (google.cloud.compute_v1.types.PacketMirroring):
                The body resource for this request
                This corresponds to the ``packet_mirroring_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, packet_mirroring, packet_mirroring_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchPacketMirroringRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchPacketMirroringRequest):
            request = compute.PatchPacketMirroringRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if packet_mirroring is not None:
            request.packet_mirroring = packet_mirroring
        if packet_mirroring_resource is not None:
            request.packet_mirroring_resource = packet_mirroring_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("packet_mirroring", request.packet_mirroring),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def patch(self,
            request: Optional[Union[compute.PatchPacketMirroringRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            packet_mirroring: Optional[str] = None,
            packet_mirroring_resource: Optional[compute.PacketMirroring] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Patches the specified PacketMirroring resource with
        the data included in the request. This method supports
        PATCH semantics and uses JSON merge patch format and
        processing rules.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.PacketMirroringsClient()

                # Initialize request argument(s)
                request = compute_v1.PatchPacketMirroringRequest(
                    packet_mirroring="packet_mirroring_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchPacketMirroringRequest, dict]):
                The request object. A request message for
                PacketMirrorings.Patch. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region for this request.
                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            packet_mirroring (str):
                Name of the PacketMirroring resource
                to patch.

                This corresponds to the ``packet_mirroring`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            packet_mirroring_resource (google.cloud.compute_v1.types.PacketMirroring):
                The body resource for this request
                This corresponds to the ``packet_mirroring_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, packet_mirroring, packet_mirroring_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchPacketMirroringRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchPacketMirroringRequest):
            request = compute.PatchPacketMirroringRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if packet_mirroring is not None:
            request.packet_mirroring = packet_mirroring
        if packet_mirroring_resource is not None:
            request.packet_mirroring_resource = packet_mirroring_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("packet_mirroring", request.packet_mirroring),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw Operation in an ExtendedOperation that polls the
        # region-scoped operations service for completion.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def test_iam_permissions(self,
            request: Optional[Union[compute.TestIamPermissionsPacketMirroringRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            resource: Optional[str] = None,
            test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.TestPermissionsResponse:
        r"""Returns permissions that a caller has on the
        specified resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_test_iam_permissions():
                # Create a client
                client = compute_v1.PacketMirroringsClient()

                # Initialize request argument(s)
                request = compute_v1.TestIamPermissionsPacketMirroringRequest(
                    project="project_value",
                    region="region_value",
                    resource="resource_value",
                )

                # Make the request
                response = client.test_iam_permissions(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.TestIamPermissionsPacketMirroringRequest, dict]):
                The request object. A request message for
                PacketMirrorings.TestIamPermissions. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                The name of the region for this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            resource (str):
                Name or id of the resource for this
                request.

                This corresponds to the ``resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest):
                The body resource for this request
                This corresponds to the ``test_permissions_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.TestPermissionsResponse:

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, resource, test_permissions_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.TestIamPermissionsPacketMirroringRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.TestIamPermissionsPacketMirroringRequest):
            request = compute.TestIamPermissionsPacketMirroringRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if resource is not None:
            request.resource = resource
        if test_permissions_request_resource is not None:
            request.test_permissions_request_resource = test_permissions_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("resource", request.resource),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def __enter__(self) -> "PacketMirroringsClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()







DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


__all__ = (
    "PacketMirroringsClient",
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/pagers.py
new file mode 100644
index 000000000..fe380641b
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/pagers.py
@@ -0,0 +1,139 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator

from google.cloud.compute_v1.types import compute


class AggregatedListPager:
    """A pager for iterating through ``aggregated_list`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.compute_v1.types.PacketMirroringAggregatedList` object, and
    provides an ``__iter__`` method to iterate through its
    ``items`` field.
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.PacketMirroringAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.PacketMirroringAggregatedList], + request: compute.AggregatedListPacketMirroringsRequest, + response: compute.PacketMirroringAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListPacketMirroringsRequest): + The initial request object. + response (google.cloud.compute_v1.types.PacketMirroringAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListPacketMirroringsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.PacketMirroringAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.PacketMirroringsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.PacketMirroringsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.PacketMirroringList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.PacketMirroringList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.PacketMirroringList], + request: compute.ListPacketMirroringsRequest, + response: compute.PacketMirroringList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListPacketMirroringsRequest): + The initial request object. + response (google.cloud.compute_v1.types.PacketMirroringList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListPacketMirroringsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.PacketMirroringList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.PacketMirroring]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/__init__.py new file mode 100644 index 000000000..e2a1b3ee3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PacketMirroringsTransport +from .rest import PacketMirroringsRestTransport +from .rest import PacketMirroringsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PacketMirroringsTransport]] +_transport_registry['rest'] = PacketMirroringsRestTransport + +__all__ = ( + 'PacketMirroringsTransport', + 'PacketMirroringsRestTransport', + 'PacketMirroringsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/base.py new file mode 100644 index 000000000..ee126c314 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.compute_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.compute_v1.types import compute
+from google.cloud.compute_v1.services import region_operations
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class PacketMirroringsTransport(abc.ABC):
+    """Abstract transport class for PacketMirrorings."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/compute',
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'compute.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListPacketMirroringsRequest], + Union[ + compute.PacketMirroringAggregatedList, + Awaitable[compute.PacketMirroringAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeletePacketMirroringRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetPacketMirroringRequest], + Union[ + compute.PacketMirroring, + Awaitable[compute.PacketMirroring] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertPacketMirroringRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListPacketMirroringsRequest], + Union[ + compute.PacketMirroringList, + Awaitable[compute.PacketMirroringList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchPacketMirroringRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsPacketMirroringRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'PacketMirroringsTransport', 
+) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py new file mode 100644 index 000000000..a2e005497 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py @@ -0,0 +1,1039 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + 
+from .base import PacketMirroringsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PacketMirroringsRestInterceptor: + """Interceptor for PacketMirrorings. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PacketMirroringsRestTransport. + + .. code-block:: python + class MyCustomPacketMirroringsInterceptor(PacketMirroringsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, 
request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PacketMirroringsRestTransport(interceptor=MyCustomPacketMirroringsInterceptor()) + client = PacketMirroringsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListPacketMirroringsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListPacketMirroringsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the PacketMirrorings server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.PacketMirroringAggregatedList) -> compute.PacketMirroringAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the PacketMirrorings server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeletePacketMirroringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeletePacketMirroringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the PacketMirrorings server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the PacketMirrorings server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetPacketMirroringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetPacketMirroringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the PacketMirrorings server. + """ + return request, metadata + + def post_get(self, response: compute.PacketMirroring) -> compute.PacketMirroring: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the PacketMirrorings server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertPacketMirroringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertPacketMirroringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the PacketMirrorings server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the PacketMirrorings server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListPacketMirroringsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListPacketMirroringsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the PacketMirrorings server. + """ + return request, metadata + + def post_list(self, response: compute.PacketMirroringList) -> compute.PacketMirroringList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the PacketMirrorings server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchPacketMirroringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchPacketMirroringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the PacketMirrorings server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the PacketMirrorings server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsPacketMirroringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsPacketMirroringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the PacketMirrorings server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the PacketMirrorings server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PacketMirroringsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PacketMirroringsRestInterceptor + + +class PacketMirroringsRestTransport(PacketMirroringsTransport): + """REST backend transport for PacketMirrorings. + + The PacketMirrorings API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[PacketMirroringsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or PacketMirroringsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AggregatedList(PacketMirroringsRestStub):
+        def __hash__(self):
+            return hash("AggregatedList")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.AggregatedListPacketMirroringsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PacketMirroringAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListPacketMirroringsRequest): + The request object. A request message for + PacketMirrorings.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.PacketMirroringAggregatedList: + Contains a list of packetMirrorings. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/packetMirrorings', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListPacketMirroringsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+        if response.status_code >= 400:
+            raise core_exceptions.from_http_response(response)
+
+        # Return the response
+        resp = compute.Operation()
+        pb_resp = compute.Operation.pb(resp)
+
+        json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+        resp = self._interceptor.post_delete(resp)
+        return resp
+
+    class _Delete(PacketMirroringsRestStub):
+        def __hash__(self):
+            return hash("Delete")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.DeletePacketMirroringRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the delete method over HTTP.
+
+            Args:
+                request (~.compute.DeletePacketMirroringRequest):
+                    The request object. A request message for
+                    PacketMirrorings.Delete. See the method
+                    description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeletePacketMirroringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(PacketMirroringsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetPacketMirroringRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PacketMirroring: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetPacketMirroringRequest): + The request object. A request message for + PacketMirrorings.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.PacketMirroring: + Represents a Packet Mirroring + resource. Packet Mirroring clones the + traffic of specified instances in your + Virtual Private Cloud (VPC) network and + forwards it to a collector destination, + such as an instance group of an internal + TCP/UDP load balancer, for analysis or + examination. For more information about + setting up Packet Mirroring, see Using + Packet Mirroring. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetPacketMirroringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PacketMirroring() + pb_resp = compute.PacketMirroring.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(PacketMirroringsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertPacketMirroringRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertPacketMirroringRequest): + The request object. A request message for + PacketMirrorings.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/packetMirrorings', + 'body': 'packet_mirroring_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertPacketMirroringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(PacketMirroringsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListPacketMirroringsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PacketMirroringList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListPacketMirroringsRequest): + The request object. A request message for + PacketMirrorings.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.PacketMirroringList: + Contains a list of PacketMirroring + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/packetMirrorings', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListPacketMirroringsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PacketMirroringList() + pb_resp = compute.PacketMirroringList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(PacketMirroringsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchPacketMirroringRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchPacketMirroringRequest): + The request object. A request message for + PacketMirrorings.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}', + 'body': 'packet_mirroring_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchPacketMirroringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _TestIamPermissions(PacketMirroringsRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsPacketMirroringRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsPacketMirroringRequest): + The request object. A request message for + PacketMirrorings.TestIamPermissions. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsPacketMirroringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListPacketMirroringsRequest], + compute.PacketMirroringAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeletePacketMirroringRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetPacketMirroringRequest], + compute.PacketMirroring]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertPacketMirroringRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListPacketMirroringsRequest], + compute.PacketMirroringList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchPacketMirroringRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsPacketMirroringRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'PacketMirroringsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/__init__.py new file mode 100644 index 000000000..ba7395029 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import ProjectsClient + +__all__ = ( + 'ProjectsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/client.py new file mode 100644 index 000000000..def3b852a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/client.py @@ -0,0 +1,3005 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = 
Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.projects import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import ProjectsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ProjectsRestTransport + + +class ProjectsClientMeta(type): + """Metaclass for the Projects client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ProjectsTransport]] + _transport_registry["rest"] = ProjectsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ProjectsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ProjectsClient(metaclass=ProjectsClientMeta): + """The Projects API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ProjectsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ProjectsTransport: + """Returns the transport used by the client instance. + + Returns: + ProjectsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ProjectsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the projects client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ProjectsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ProjectsTransport): + # transport is a ProjectsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def disable_xpn_host_unary(self, + request: Optional[Union[compute.DisableXpnHostProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Disable this project as a shared VPC host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_disable_xpn_host(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.DisableXpnHostProjectRequest( + project="project_value", + ) + + # Make the request + response = client.disable_xpn_host(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DisableXpnHostProjectRequest, dict]): + The request object. 
A request message for + Projects.DisableXpnHost. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DisableXpnHostProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DisableXpnHostProjectRequest): + request = compute.DisableXpnHostProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.disable_xpn_host] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def disable_xpn_host(self, + request: Optional[Union[compute.DisableXpnHostProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Disable this project as a shared VPC host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_disable_xpn_host(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.DisableXpnHostProjectRequest( + project="project_value", + ) + + # Make the request + response = client.disable_xpn_host(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DisableXpnHostProjectRequest, dict]): + The request object. A request message for + Projects.DisableXpnHost. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DisableXpnHostProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DisableXpnHostProjectRequest): + request = compute.DisableXpnHostProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.disable_xpn_host] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def disable_xpn_resource_unary(self, + request: Optional[Union[compute.DisableXpnResourceProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + projects_disable_xpn_resource_request_resource: Optional[compute.ProjectsDisableXpnResourceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Disable a service resource (also known as service + project) associated with this host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_disable_xpn_resource(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.DisableXpnResourceProjectRequest( + project="project_value", + ) + + # Make the request + response = client.disable_xpn_resource(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DisableXpnResourceProjectRequest, dict]): + The request object. A request message for + Projects.DisableXpnResource. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + projects_disable_xpn_resource_request_resource (google.cloud.compute_v1.types.ProjectsDisableXpnResourceRequest): + The body resource for this request + This corresponds to the ``projects_disable_xpn_resource_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, projects_disable_xpn_resource_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DisableXpnResourceProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DisableXpnResourceProjectRequest): + request = compute.DisableXpnResourceProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if projects_disable_xpn_resource_request_resource is not None: + request.projects_disable_xpn_resource_request_resource = projects_disable_xpn_resource_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.disable_xpn_resource] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def disable_xpn_resource(self, + request: Optional[Union[compute.DisableXpnResourceProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + projects_disable_xpn_resource_request_resource: Optional[compute.ProjectsDisableXpnResourceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Disable a service resource (also known as service + project) associated with this host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_disable_xpn_resource(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.DisableXpnResourceProjectRequest( + project="project_value", + ) + + # Make the request + response = client.disable_xpn_resource(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DisableXpnResourceProjectRequest, dict]): + The request object. A request message for + Projects.DisableXpnResource. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ projects_disable_xpn_resource_request_resource (google.cloud.compute_v1.types.ProjectsDisableXpnResourceRequest): + The body resource for this request + This corresponds to the ``projects_disable_xpn_resource_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, projects_disable_xpn_resource_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DisableXpnResourceProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DisableXpnResourceProjectRequest): + request = compute.DisableXpnResourceProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if projects_disable_xpn_resource_request_resource is not None: + request.projects_disable_xpn_resource_request_resource = projects_disable_xpn_resource_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.disable_xpn_resource] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def enable_xpn_host_unary(self, + request: Optional[Union[compute.EnableXpnHostProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Enable this project as a shared VPC host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_enable_xpn_host(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.EnableXpnHostProjectRequest( + project="project_value", + ) + + # Make the request + response = client.enable_xpn_host(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.EnableXpnHostProjectRequest, dict]): + The request object. A request message for + Projects.EnableXpnHost. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.EnableXpnHostProjectRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.EnableXpnHostProjectRequest): + request = compute.EnableXpnHostProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enable_xpn_host] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def enable_xpn_host(self, + request: Optional[Union[compute.EnableXpnHostProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Enable this project as a shared VPC host project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_enable_xpn_host(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.EnableXpnHostProjectRequest( + project="project_value", + ) + + # Make the request + response = client.enable_xpn_host(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.EnableXpnHostProjectRequest, dict]): + The request object. A request message for + Projects.EnableXpnHost. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.EnableXpnHostProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.EnableXpnHostProjectRequest): + request = compute.EnableXpnHostProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enable_xpn_host] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def enable_xpn_resource_unary(self, + request: Optional[Union[compute.EnableXpnResourceProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + projects_enable_xpn_resource_request_resource: Optional[compute.ProjectsEnableXpnResourceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Enable service resource (a.k.a service project) for a + host project, so that subnets in the host project can be + used by instances in the service project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_enable_xpn_resource(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.EnableXpnResourceProjectRequest( + project="project_value", + ) + + # Make the request + response = client.enable_xpn_resource(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.EnableXpnResourceProjectRequest, dict]): + The request object. A request message for + Projects.EnableXpnResource. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ projects_enable_xpn_resource_request_resource (google.cloud.compute_v1.types.ProjectsEnableXpnResourceRequest): + The body resource for this request + This corresponds to the ``projects_enable_xpn_resource_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, projects_enable_xpn_resource_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.EnableXpnResourceProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.EnableXpnResourceProjectRequest): + request = compute.EnableXpnResourceProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if projects_enable_xpn_resource_request_resource is not None: + request.projects_enable_xpn_resource_request_resource = projects_enable_xpn_resource_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.enable_xpn_resource] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def enable_xpn_resource(self, + request: Optional[Union[compute.EnableXpnResourceProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + projects_enable_xpn_resource_request_resource: Optional[compute.ProjectsEnableXpnResourceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Enable service resource (a.k.a service project) for a + host project, so that subnets in the host project can be + used by instances in the service project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_enable_xpn_resource(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.EnableXpnResourceProjectRequest( + project="project_value", + ) + + # Make the request + response = client.enable_xpn_resource(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.EnableXpnResourceProjectRequest, dict]): + The request object. 
A request message for + Projects.EnableXpnResource. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + projects_enable_xpn_resource_request_resource (google.cloud.compute_v1.types.ProjectsEnableXpnResourceRequest): + The body resource for this request + This corresponds to the ``projects_enable_xpn_resource_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, projects_enable_xpn_resource_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.EnableXpnResourceProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.EnableXpnResourceProjectRequest): + request = compute.EnableXpnResourceProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if projects_enable_xpn_resource_request_resource is not None: + request.projects_enable_xpn_resource_request_resource = projects_enable_xpn_resource_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.enable_xpn_resource] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Project: + r"""Returns the specified Project resource. To decrease latency for + this method, you can optionally omit any unneeded information + from the response by using a field mask. This practice is + especially recommended for unused quota information (the + ``quotas`` field). To exclude one or more fields, set your + request's ``fields`` query parameter to only include the fields + you need. For example, to only include the ``id`` and + ``selfLink`` fields, add the query parameter + ``?fields=id,selfLink`` to your request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.GetProjectRequest( + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetProjectRequest, dict]): + The request object. A request message for Projects.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Project: + Represents a Project resource. A + project is used to organize resources in + a Google Cloud Platform environment. For + more information, read about the + Resource Hierarchy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetProjectRequest): + request = compute.GetProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_xpn_host(self, + request: Optional[Union[compute.GetXpnHostProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Project: + r"""Gets the shared VPC host project that this project + links to. May be empty if no link exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_xpn_host(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.GetXpnHostProjectRequest( + project="project_value", + ) + + # Make the request + response = client.get_xpn_host(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetXpnHostProjectRequest, dict]): + The request object. A request message for + Projects.GetXpnHost. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Project: + Represents a Project resource. 
A + project is used to organize resources in + a Google Cloud Platform environment. For + more information, read about the + Resource Hierarchy. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetXpnHostProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetXpnHostProjectRequest): + request = compute.GetXpnHostProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_xpn_host] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_xpn_resources(self, + request: Optional[Union[compute.GetXpnResourcesProjectsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetXpnResourcesPager: + r"""Gets service resources (a.k.a service project) + associated with this host project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_xpn_resources(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.GetXpnResourcesProjectsRequest( + project="project_value", + ) + + # Make the request + page_result = client.get_xpn_resources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetXpnResourcesProjectsRequest, dict]): + The request object. A request message for + Projects.GetXpnResources. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.projects.pagers.GetXpnResourcesPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetXpnResourcesProjectsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetXpnResourcesProjectsRequest): + request = compute.GetXpnResourcesProjectsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_xpn_resources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.GetXpnResourcesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_xpn_hosts(self, + request: Optional[Union[compute.ListXpnHostsProjectsRequest, dict]] = None, + *, + project: Optional[str] = None, + projects_list_xpn_hosts_request_resource: Optional[compute.ProjectsListXpnHostsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListXpnHostsPager: + r"""Lists all shared VPC host projects visible to the + user in an organization. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_xpn_hosts(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.ListXpnHostsProjectsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_xpn_hosts(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListXpnHostsProjectsRequest, dict]): + The request object. A request message for + Projects.ListXpnHosts. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + projects_list_xpn_hosts_request_resource (google.cloud.compute_v1.types.ProjectsListXpnHostsRequest): + The body resource for this request + This corresponds to the ``projects_list_xpn_hosts_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.projects.pagers.ListXpnHostsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, projects_list_xpn_hosts_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListXpnHostsProjectsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListXpnHostsProjectsRequest): + request = compute.ListXpnHostsProjectsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if projects_list_xpn_hosts_request_resource is not None: + request.projects_list_xpn_hosts_request_resource = projects_list_xpn_hosts_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_xpn_hosts] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListXpnHostsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def move_disk_unary(self, + request: Optional[Union[compute.MoveDiskProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + disk_move_request_resource: Optional[compute.DiskMoveRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Moves a persistent disk from one zone to another. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move_disk(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.MoveDiskProjectRequest( + project="project_value", + ) + + # Make the request + response = client.move_disk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveDiskProjectRequest, dict]): + The request object. A request message for + Projects.MoveDisk. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_move_request_resource (google.cloud.compute_v1.types.DiskMoveRequest): + The body resource for this request + This corresponds to the ``disk_move_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, disk_move_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.MoveDiskProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.MoveDiskProjectRequest): + request = compute.MoveDiskProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if disk_move_request_resource is not None: + request.disk_move_request_resource = disk_move_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move_disk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def move_disk(self, + request: Optional[Union[compute.MoveDiskProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + disk_move_request_resource: Optional[compute.DiskMoveRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Moves a persistent disk from one zone to another. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move_disk(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.MoveDiskProjectRequest( + project="project_value", + ) + + # Make the request + response = client.move_disk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveDiskProjectRequest, dict]): + The request object. A request message for + Projects.MoveDisk. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_move_request_resource (google.cloud.compute_v1.types.DiskMoveRequest): + The body resource for this request + This corresponds to the ``disk_move_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, disk_move_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.MoveDiskProjectRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.MoveDiskProjectRequest): + request = compute.MoveDiskProjectRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if disk_move_request_resource is not None: + request.disk_move_request_resource = disk_move_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move_disk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def move_instance_unary(self, + request: Optional[Union[compute.MoveInstanceProjectRequest, dict]] = None, + *, + project: Optional[str] = None, + instance_move_request_resource: Optional[compute.InstanceMoveRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Moves an instance and its attached persistent disks from one + zone to another. *Note*: Moving VMs or disks by using this + method might cause unexpected behavior. For more information, + see the `known + issue `__. + [Deprecated] This method is deprecated. See `moving instance + across + zones `__ + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_move_instance(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.MoveInstanceProjectRequest( + project="project_value", + ) + + # Make the request + response = client.move_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.MoveInstanceProjectRequest, dict]): + The request object. A request message for + Projects.MoveInstance. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_move_request_resource (google.cloud.compute_v1.types.InstanceMoveRequest): + The body resource for this request + This corresponds to the ``instance_move_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
    def move_instance(self,
            request: Optional[Union[compute.MoveInstanceProjectRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            instance_move_request_resource: Optional[compute.InstanceMoveRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Moves an instance and its attached persistent disks from one
        zone to another. *Note*: Moving VMs or disks by using this
        method might cause unexpected behavior. For more information,
        see the `known
        issue `__.
        [Deprecated] This method is deprecated. See `moving instance
        across
        zones `__
        instead.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_move_instance():
                # Create a client
                client = compute_v1.ProjectsClient()

                # Initialize request argument(s)
                request = compute_v1.MoveInstanceProjectRequest(
                    project="project_value",
                )

                # Make the request
                response = client.move_instance(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.MoveInstanceProjectRequest, dict]):
                The request object. A request message for
                Projects.MoveInstance. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_move_request_resource (google.cloud.compute_v1.types.InstanceMoveRequest):
                The body resource for this request
                This corresponds to the ``instance_move_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, instance_move_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.MoveInstanceProjectRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.MoveInstanceProjectRequest):
            request = compute.MoveInstanceProjectRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if instance_move_request_resource is not None:
                request.instance_move_request_resource = instance_move_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.move_instance]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Poll the returned Operation via the global operations service so the
        # caller gets an ExtendedOperation future instead of a raw Operation.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def set_common_instance_metadata_unary(self,
            request: Optional[Union[compute.SetCommonInstanceMetadataProjectRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            metadata_resource: Optional[compute.Metadata] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Sets metadata common to all instances within the
        specified project using the data included in the
        request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_common_instance_metadata():
                # Create a client
                client = compute_v1.ProjectsClient()

                # Initialize request argument(s)
                request = compute_v1.SetCommonInstanceMetadataProjectRequest(
                    project="project_value",
                )

                # Make the request
                response = client.set_common_instance_metadata(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetCommonInstanceMetadataProjectRequest, dict]):
                The request object. A request message for
                Projects.SetCommonInstanceMetadata. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            metadata_resource (google.cloud.compute_v1.types.Metadata):
                The body resource for this request
                This corresponds to the ``metadata_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, metadata_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetCommonInstanceMetadataProjectRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetCommonInstanceMetadataProjectRequest):
            request = compute.SetCommonInstanceMetadataProjectRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if metadata_resource is not None:
                request.metadata_resource = metadata_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_common_instance_metadata]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def set_common_instance_metadata(self,
            request: Optional[Union[compute.SetCommonInstanceMetadataProjectRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            metadata_resource: Optional[compute.Metadata] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Sets metadata common to all instances within the
        specified project using the data included in the
        request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_common_instance_metadata():
                # Create a client
                client = compute_v1.ProjectsClient()

                # Initialize request argument(s)
                request = compute_v1.SetCommonInstanceMetadataProjectRequest(
                    project="project_value",
                )

                # Make the request
                response = client.set_common_instance_metadata(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetCommonInstanceMetadataProjectRequest, dict]):
                The request object. A request message for
                Projects.SetCommonInstanceMetadata. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            metadata_resource (google.cloud.compute_v1.types.Metadata):
                The body resource for this request
                This corresponds to the ``metadata_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, metadata_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetCommonInstanceMetadataProjectRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetCommonInstanceMetadataProjectRequest):
            request = compute.SetCommonInstanceMetadataProjectRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if metadata_resource is not None:
                request.metadata_resource = metadata_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_common_instance_metadata]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Poll the returned Operation via the global operations service so the
        # caller gets an ExtendedOperation future instead of a raw Operation.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def set_default_network_tier_unary(self,
            request: Optional[Union[compute.SetDefaultNetworkTierProjectRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            projects_set_default_network_tier_request_resource: Optional[compute.ProjectsSetDefaultNetworkTierRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Sets the default network tier of the project. The
        default network tier is used when an
        address/forwardingRule/instance is created without
        specifying the network tier field.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_default_network_tier():
                # Create a client
                client = compute_v1.ProjectsClient()

                # Initialize request argument(s)
                request = compute_v1.SetDefaultNetworkTierProjectRequest(
                    project="project_value",
                )

                # Make the request
                response = client.set_default_network_tier(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetDefaultNetworkTierProjectRequest, dict]):
                The request object. A request message for
                Projects.SetDefaultNetworkTier. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            projects_set_default_network_tier_request_resource (google.cloud.compute_v1.types.ProjectsSetDefaultNetworkTierRequest):
                The body resource for this request
                This corresponds to the ``projects_set_default_network_tier_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, projects_set_default_network_tier_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetDefaultNetworkTierProjectRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetDefaultNetworkTierProjectRequest):
            request = compute.SetDefaultNetworkTierProjectRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if projects_set_default_network_tier_request_resource is not None:
                request.projects_set_default_network_tier_request_resource = projects_set_default_network_tier_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_default_network_tier]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def set_default_network_tier(self,
            request: Optional[Union[compute.SetDefaultNetworkTierProjectRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            projects_set_default_network_tier_request_resource: Optional[compute.ProjectsSetDefaultNetworkTierRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Sets the default network tier of the project. The
        default network tier is used when an
        address/forwardingRule/instance is created without
        specifying the network tier field.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_default_network_tier():
                # Create a client
                client = compute_v1.ProjectsClient()

                # Initialize request argument(s)
                request = compute_v1.SetDefaultNetworkTierProjectRequest(
                    project="project_value",
                )

                # Make the request
                response = client.set_default_network_tier(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetDefaultNetworkTierProjectRequest, dict]):
                The request object. A request message for
                Projects.SetDefaultNetworkTier. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            projects_set_default_network_tier_request_resource (google.cloud.compute_v1.types.ProjectsSetDefaultNetworkTierRequest):
                The body resource for this request
                This corresponds to the ``projects_set_default_network_tier_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, projects_set_default_network_tier_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetDefaultNetworkTierProjectRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetDefaultNetworkTierProjectRequest):
            request = compute.SetDefaultNetworkTierProjectRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if projects_set_default_network_tier_request_resource is not None:
                request.projects_set_default_network_tier_request_resource = projects_set_default_network_tier_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_default_network_tier]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Poll the returned Operation via the global operations service so the
        # caller gets an ExtendedOperation future instead of a raw Operation.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def set_usage_export_bucket_unary(self,
            request: Optional[Union[compute.SetUsageExportBucketProjectRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            usage_export_location_resource: Optional[compute.UsageExportLocation] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Enables the usage export feature and sets the usage
        export bucket where reports are stored. If you provide
        an empty request body using this method, the usage
        export feature will be disabled.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_usage_export_bucket():
                # Create a client
                client = compute_v1.ProjectsClient()

                # Initialize request argument(s)
                request = compute_v1.SetUsageExportBucketProjectRequest(
                    project="project_value",
                )

                # Make the request
                response = client.set_usage_export_bucket(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetUsageExportBucketProjectRequest, dict]):
                The request object. A request message for
                Projects.SetUsageExportBucket. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            usage_export_location_resource (google.cloud.compute_v1.types.UsageExportLocation):
                The body resource for this request
                This corresponds to the ``usage_export_location_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, usage_export_location_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetUsageExportBucketProjectRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetUsageExportBucketProjectRequest):
            request = compute.SetUsageExportBucketProjectRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if usage_export_location_resource is not None:
                request.usage_export_location_resource = usage_export_location_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_usage_export_bucket]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def set_usage_export_bucket(self,
            request: Optional[Union[compute.SetUsageExportBucketProjectRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            usage_export_location_resource: Optional[compute.UsageExportLocation] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Enables the usage export feature and sets the usage
        export bucket where reports are stored. If you provide
        an empty request body using this method, the usage
        export feature will be disabled.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_usage_export_bucket():
                # Create a client
                client = compute_v1.ProjectsClient()

                # Initialize request argument(s)
                request = compute_v1.SetUsageExportBucketProjectRequest(
                    project="project_value",
                )

                # Make the request
                response = client.set_usage_export_bucket(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetUsageExportBucketProjectRequest, dict]):
                The request object. A request message for
                Projects.SetUsageExportBucket. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            usage_export_location_resource (google.cloud.compute_v1.types.UsageExportLocation):
                The body resource for this request
                This corresponds to the ``usage_export_location_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, usage_export_location_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetUsageExportBucketProjectRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetUsageExportBucketProjectRequest):
            request = compute.SetUsageExportBucketProjectRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if usage_export_location_resource is not None:
                request.usage_export_location_resource = usage_export_location_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_usage_export_bucket]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Poll the returned Operation via the global operations service so the
        # caller gets an ExtendedOperation future instead of a raw Operation.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def __enter__(self) -> "ProjectsClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ProjectsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/pagers.py new file mode 100644 index 000000000..fc6e9675a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/pagers.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class GetXpnResourcesPager: + """A pager for iterating through ``get_xpn_resources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ProjectsGetXpnResources` object, and + provides an ``__iter__`` method to iterate through its + ``resources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``GetXpnResources`` requests and continue to iterate + through the ``resources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ProjectsGetXpnResources` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.ProjectsGetXpnResources], + request: compute.GetXpnResourcesProjectsRequest, + response: compute.ProjectsGetXpnResources, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.GetXpnResourcesProjectsRequest): + The initial request object. + response (google.cloud.compute_v1.types.ProjectsGetXpnResources): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.GetXpnResourcesProjectsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ProjectsGetXpnResources]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.XpnResourceId]: + for page in self.pages: + yield from page.resources + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListXpnHostsPager: + """A pager for iterating through ``list_xpn_hosts`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.XpnHostList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListXpnHosts`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.XpnHostList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.XpnHostList], + request: compute.ListXpnHostsProjectsRequest, + response: compute.XpnHostList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListXpnHostsProjectsRequest): + The initial request object. + response (google.cloud.compute_v1.types.XpnHostList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListXpnHostsProjectsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.XpnHostList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Project]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/__init__.py new file mode 100644 index 000000000..c66f626a6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ProjectsTransport +from .rest import ProjectsRestTransport +from .rest import ProjectsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ProjectsTransport]] +_transport_registry['rest'] = ProjectsRestTransport + +__all__ = ( + 'ProjectsTransport', + 'ProjectsRestTransport', + 'ProjectsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/base.py new file mode 100644 index 000000000..b063a52ab --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/base.py @@ -0,0 +1,331 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ProjectsTransport(abc.ABC): + """Abstract transport class for Projects.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.disable_xpn_host: gapic_v1.method.wrap_method( + self.disable_xpn_host, + default_timeout=None, + client_info=client_info, + ), + self.disable_xpn_resource: gapic_v1.method.wrap_method( + self.disable_xpn_resource, + default_timeout=None, + client_info=client_info, + ), + self.enable_xpn_host: gapic_v1.method.wrap_method( + self.enable_xpn_host, + default_timeout=None, + client_info=client_info, + ), + self.enable_xpn_resource: gapic_v1.method.wrap_method( + self.enable_xpn_resource, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_xpn_host: gapic_v1.method.wrap_method( + self.get_xpn_host, + default_timeout=None, + client_info=client_info, + ), + self.get_xpn_resources: gapic_v1.method.wrap_method( + self.get_xpn_resources, + default_timeout=None, + client_info=client_info, + ), + self.list_xpn_hosts: gapic_v1.method.wrap_method( + self.list_xpn_hosts, + default_timeout=None, + client_info=client_info, + ), + self.move_disk: gapic_v1.method.wrap_method( + self.move_disk, + default_timeout=None, + client_info=client_info, + ), + self.move_instance: gapic_v1.method.wrap_method( + self.move_instance, + default_timeout=None, + client_info=client_info, + ), + self.set_common_instance_metadata: gapic_v1.method.wrap_method( + self.set_common_instance_metadata, + default_timeout=None, + client_info=client_info, + ), + self.set_default_network_tier: gapic_v1.method.wrap_method( + self.set_default_network_tier, + default_timeout=None, + client_info=client_info, + ), + self.set_usage_export_bucket: gapic_v1.method.wrap_method( + self.set_usage_export_bucket, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated 
with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def disable_xpn_host(self) -> Callable[ + [compute.DisableXpnHostProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def disable_xpn_resource(self) -> Callable[ + [compute.DisableXpnResourceProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def enable_xpn_host(self) -> Callable[ + [compute.EnableXpnHostProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def enable_xpn_resource(self) -> Callable[ + [compute.EnableXpnResourceProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetProjectRequest], + Union[ + compute.Project, + Awaitable[compute.Project] + ]]: + raise NotImplementedError() + + @property + def get_xpn_host(self) -> Callable[ + [compute.GetXpnHostProjectRequest], + Union[ + compute.Project, + Awaitable[compute.Project] + ]]: + raise NotImplementedError() + + @property + def get_xpn_resources(self) -> Callable[ + [compute.GetXpnResourcesProjectsRequest], + Union[ + compute.ProjectsGetXpnResources, + Awaitable[compute.ProjectsGetXpnResources] + ]]: + raise NotImplementedError() + + @property + def list_xpn_hosts(self) -> Callable[ + [compute.ListXpnHostsProjectsRequest], + Union[ + compute.XpnHostList, + Awaitable[compute.XpnHostList] + ]]: + raise NotImplementedError() + + @property + def move_disk(self) -> Callable[ + [compute.MoveDiskProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def move_instance(self) -> Callable[ + 
[compute.MoveInstanceProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_common_instance_metadata(self) -> Callable[ + [compute.SetCommonInstanceMetadataProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_default_network_tier(self) -> Callable[ + [compute.SetDefaultNetworkTierProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_usage_export_bucket(self) -> Callable[ + [compute.SetUsageExportBucketProjectRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'ProjectsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/rest.py new file mode 100644 index 000000000..57c0a5f14 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/projects/transports/rest.py @@ -0,0 +1,1832 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ProjectsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ProjectsRestInterceptor: + """Interceptor for Projects. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ProjectsRestTransport. + + .. code-block:: python + class MyCustomProjectsInterceptor(ProjectsRestInterceptor): + def pre_disable_xpn_host(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_xpn_host(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_disable_xpn_resource(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_xpn_resource(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_xpn_host(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_xpn_host(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_xpn_resource(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_xpn_resource(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_xpn_host(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_xpn_host(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_xpn_resources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_xpn_resources(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_list_xpn_hosts(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_xpn_hosts(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_move_disk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_move_disk(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_move_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_move_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_common_instance_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_common_instance_metadata(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_default_network_tier(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_default_network_tier(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_usage_export_bucket(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_usage_export_bucket(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ProjectsRestTransport(interceptor=MyCustomProjectsInterceptor()) + client = ProjectsClient(transport=transport) + + + """ + def pre_disable_xpn_host(self, request: compute.DisableXpnHostProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DisableXpnHostProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for disable_xpn_host + + Override in a subclass to manipulate the request or metadata + before they are 
sent to the Projects server. + """ + return request, metadata + + def post_disable_xpn_host(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for disable_xpn_host + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_disable_xpn_resource(self, request: compute.DisableXpnResourceProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DisableXpnResourceProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for disable_xpn_resource + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_disable_xpn_resource(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for disable_xpn_resource + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_enable_xpn_host(self, request: compute.EnableXpnHostProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.EnableXpnHostProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for enable_xpn_host + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_enable_xpn_host(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for enable_xpn_host + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. 
+ """ + return response + def pre_enable_xpn_resource(self, request: compute.EnableXpnResourceProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.EnableXpnResourceProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for enable_xpn_resource + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_enable_xpn_resource(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for enable_xpn_resource + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_get(self, response: compute.Project) -> compute.Project: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_get_xpn_host(self, request: compute.GetXpnHostProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetXpnHostProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_xpn_host + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_get_xpn_host(self, response: compute.Project) -> compute.Project: + """Post-rpc interceptor for get_xpn_host + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. 
+ """ + return response + def pre_get_xpn_resources(self, request: compute.GetXpnResourcesProjectsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetXpnResourcesProjectsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_xpn_resources + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_get_xpn_resources(self, response: compute.ProjectsGetXpnResources) -> compute.ProjectsGetXpnResources: + """Post-rpc interceptor for get_xpn_resources + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_list_xpn_hosts(self, request: compute.ListXpnHostsProjectsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListXpnHostsProjectsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_xpn_hosts + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_list_xpn_hosts(self, response: compute.XpnHostList) -> compute.XpnHostList: + """Post-rpc interceptor for list_xpn_hosts + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_move_disk(self, request: compute.MoveDiskProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.MoveDiskProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for move_disk + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. 
+ """ + return request, metadata + + def post_move_disk(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for move_disk + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_move_instance(self, request: compute.MoveInstanceProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.MoveInstanceProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for move_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_move_instance(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for move_instance + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_set_common_instance_metadata(self, request: compute.SetCommonInstanceMetadataProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetCommonInstanceMetadataProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_common_instance_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_set_common_instance_metadata(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_common_instance_metadata + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. 
+ """ + return response + def pre_set_default_network_tier(self, request: compute.SetDefaultNetworkTierProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetDefaultNetworkTierProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_default_network_tier + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_set_default_network_tier(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_default_network_tier + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + def pre_set_usage_export_bucket(self, request: compute.SetUsageExportBucketProjectRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetUsageExportBucketProjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_usage_export_bucket + + Override in a subclass to manipulate the request or metadata + before they are sent to the Projects server. + """ + return request, metadata + + def post_set_usage_export_bucket(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_usage_export_bucket + + Override in a subclass to manipulate the response + after it is returned by the Projects server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ProjectsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ProjectsRestInterceptor + + +class ProjectsRestTransport(ProjectsTransport): + """REST backend transport for Projects. + + The Projects API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ProjectsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+                client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                    The client info used to send a user-agent string along with
+                    API requests. If ``None``, then default info will be used.
+                    Generally, you only need to set this if you are developing
+                    your own client library.
+                always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                    be used for service account credentials.
+                url_scheme: the protocol scheme for the API endpoint. Normally
+                        "https", but for testing or local servers,
+                        "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ProjectsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _DisableXpnHost(ProjectsRestStub):
+        def __hash__(self):
+            return hash("DisableXpnHost")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.DisableXpnHostProjectRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                
metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the disable xpn host method over HTTP.
+
+            Args:
+                request (~.compute.DisableXpnHostProjectRequest):
+                    The request object. A request message for
+                    Projects.DisableXpnHost. See the method
+                    description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. For more
+                    information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/disableXpnHost', + }, + ] + request, metadata = self._interceptor.pre_disable_xpn_host(request, metadata) + pb_request = compute.DisableXpnHostProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_xpn_host(resp) + return resp + + class _DisableXpnResource(ProjectsRestStub): + def __hash__(self): + return hash("DisableXpnResource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DisableXpnResourceProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the disable xpn resource method over HTTP. + + Args: + request (~.compute.DisableXpnResourceProjectRequest): + The request object. A request message for + Projects.DisableXpnResource. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/disableXpnResource', + 'body': 'projects_disable_xpn_resource_request_resource', + }, + ] + request, metadata = self._interceptor.pre_disable_xpn_resource(request, metadata) + pb_request = compute.DisableXpnResourceProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_xpn_resource(resp) + return resp + + class _EnableXpnHost(ProjectsRestStub): + def __hash__(self): + return hash("EnableXpnHost") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.EnableXpnHostProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the enable xpn host method over HTTP. + + Args: + request (~.compute.EnableXpnHostProjectRequest): + The request object. A request message for + Projects.EnableXpnHost. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/enableXpnHost', + }, + ] + request, metadata = self._interceptor.pre_enable_xpn_host(request, metadata) + pb_request = compute.EnableXpnHostProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_xpn_host(resp) + return resp + + class _EnableXpnResource(ProjectsRestStub): + def __hash__(self): + return hash("EnableXpnResource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.EnableXpnResourceProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the enable xpn resource method over HTTP. + + Args: + request (~.compute.EnableXpnResourceProjectRequest): + The request object. A request message for + Projects.EnableXpnResource. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/enableXpnResource', + 'body': 'projects_enable_xpn_resource_request_resource', + }, + ] + request, metadata = self._interceptor.pre_enable_xpn_resource(request, metadata) + pb_request = compute.EnableXpnResourceProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_xpn_resource(resp) + return resp + + class _Get(ProjectsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Project: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetProjectRequest): + The request object. A request message for Projects.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Project: + Represents a Project resource. A + project is used to organize resources in + a Google Cloud Platform environment. For + more information, read about the + Resource Hierarchy. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Project() + pb_resp = compute.Project.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetXpnHost(ProjectsRestStub): + def __hash__(self): + return hash("GetXpnHost") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetXpnHostProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Project: + r"""Call the get xpn host method over HTTP. 
+ + Args: + request (~.compute.GetXpnHostProjectRequest): + The request object. A request message for + Projects.GetXpnHost. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Project: + Represents a Project resource. A + project is used to organize resources in + a Google Cloud Platform environment. For + more information, read about the + Resource Hierarchy. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/getXpnHost', + }, + ] + request, metadata = self._interceptor.pre_get_xpn_host(request, metadata) + pb_request = compute.GetXpnHostProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Project() + pb_resp = compute.Project.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_xpn_host(resp) + return resp + + class _GetXpnResources(ProjectsRestStub): + def __hash__(self): + return hash("GetXpnResources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetXpnResourcesProjectsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ProjectsGetXpnResources: + r"""Call the get xpn resources method over HTTP. + + Args: + request (~.compute.GetXpnResourcesProjectsRequest): + The request object. A request message for + Projects.GetXpnResources. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.ProjectsGetXpnResources: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/getXpnResources', + }, + ] + request, metadata = self._interceptor.pre_get_xpn_resources(request, metadata) + pb_request = compute.GetXpnResourcesProjectsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ProjectsGetXpnResources() + pb_resp = compute.ProjectsGetXpnResources.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_xpn_resources(resp) + return resp + + class _ListXpnHosts(ProjectsRestStub): + def __hash__(self): + return hash("ListXpnHosts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListXpnHostsProjectsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.XpnHostList: + r"""Call the list xpn hosts method over HTTP. + + Args: + request (~.compute.ListXpnHostsProjectsRequest): + The request object. A request message for + Projects.ListXpnHosts. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.XpnHostList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/listXpnHosts', + 'body': 'projects_list_xpn_hosts_request_resource', + }, + ] + request, metadata = self._interceptor.pre_list_xpn_hosts(request, metadata) + pb_request = compute.ListXpnHostsProjectsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.XpnHostList() + pb_resp = compute.XpnHostList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_xpn_hosts(resp) + return resp + + class _MoveDisk(ProjectsRestStub): + def __hash__(self): + return hash("MoveDisk") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.MoveDiskProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the move disk method over HTTP. + + Args: + request (~.compute.MoveDiskProjectRequest): + The request object. A request message for + Projects.MoveDisk. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/moveDisk', + 'body': 'disk_move_request_resource', + }, + ] + request, metadata = self._interceptor.pre_move_disk(request, metadata) + pb_request = compute.MoveDiskProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_move_disk(resp) + return resp + + class _MoveInstance(ProjectsRestStub): + def __hash__(self): + return hash("MoveInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.MoveInstanceProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the move instance method over HTTP. + + Args: + request (~.compute.MoveInstanceProjectRequest): + The request object. A request message for + Projects.MoveInstance. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/moveInstance', + 'body': 'instance_move_request_resource', + }, + ] + request, metadata = self._interceptor.pre_move_instance(request, metadata) + pb_request = compute.MoveInstanceProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_move_instance(resp) + return resp + + class _SetCommonInstanceMetadata(ProjectsRestStub): + def __hash__(self): + return hash("SetCommonInstanceMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetCommonInstanceMetadataProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set common instance + metadata method over HTTP. + + Args: + request (~.compute.SetCommonInstanceMetadataProjectRequest): + The request object. A request message for + Projects.SetCommonInstanceMetadata. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/setCommonInstanceMetadata', + 'body': 'metadata_resource', + }, + ] + request, metadata = self._interceptor.pre_set_common_instance_metadata(request, metadata) + pb_request = compute.SetCommonInstanceMetadataProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_common_instance_metadata(resp) + return resp + + class _SetDefaultNetworkTier(ProjectsRestStub): + def __hash__(self): + return hash("SetDefaultNetworkTier") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetDefaultNetworkTierProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set default network tier method over HTTP. + + Args: + request (~.compute.SetDefaultNetworkTierProjectRequest): + The request object. A request message for + Projects.SetDefaultNetworkTier. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/setDefaultNetworkTier', + 'body': 'projects_set_default_network_tier_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_default_network_tier(request, metadata) + pb_request = compute.SetDefaultNetworkTierProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_default_network_tier(resp) + return resp + + class _SetUsageExportBucket(ProjectsRestStub): + def __hash__(self): + return hash("SetUsageExportBucket") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetUsageExportBucketProjectRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set usage export bucket method over HTTP. + + Args: + request (~.compute.SetUsageExportBucketProjectRequest): + The request object. A request message for + Projects.SetUsageExportBucket. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/setUsageExportBucket', + 'body': 'usage_export_location_resource', + }, + ] + request, metadata = self._interceptor.pre_set_usage_export_bucket(request, metadata) + pb_request = compute.SetUsageExportBucketProjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_usage_export_bucket(resp) + return resp + + @property + def disable_xpn_host(self) -> Callable[ + [compute.DisableXpnHostProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DisableXpnHost(self._session, self._host, self._interceptor) # type: ignore + + @property + def disable_xpn_resource(self) -> Callable[ + [compute.DisableXpnResourceProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DisableXpnResource(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_xpn_host(self) -> Callable[ + [compute.EnableXpnHostProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableXpnHost(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_xpn_resource(self) -> Callable[ + [compute.EnableXpnResourceProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableXpnResource(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetProjectRequest], + compute.Project]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_xpn_host(self) -> Callable[ + [compute.GetXpnHostProjectRequest], + compute.Project]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetXpnHost(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_xpn_resources(self) -> Callable[ + [compute.GetXpnResourcesProjectsRequest], + compute.ProjectsGetXpnResources]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetXpnResources(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_xpn_hosts(self) -> Callable[ + [compute.ListXpnHostsProjectsRequest], + compute.XpnHostList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListXpnHosts(self._session, self._host, self._interceptor) # type: ignore + + @property + def move_disk(self) -> Callable[ + [compute.MoveDiskProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._MoveDisk(self._session, self._host, self._interceptor) # type: ignore + + @property + def move_instance(self) -> Callable[ + [compute.MoveInstanceProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._MoveInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_common_instance_metadata(self) -> Callable[ + [compute.SetCommonInstanceMetadataProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetCommonInstanceMetadata(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_default_network_tier(self) -> Callable[ + [compute.SetDefaultNetworkTierProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetDefaultNetworkTier(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_usage_export_bucket(self) -> Callable[ + [compute.SetUsageExportBucketProjectRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetUsageExportBucket(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ProjectsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/__init__.py new file mode 100644 index 000000000..0aaa6e73e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import PublicAdvertisedPrefixesClient + +__all__ = ( + 'PublicAdvertisedPrefixesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/client.py new file mode 100644 index 000000000..38ac549de --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/client.py @@ -0,0 +1,1383 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.public_advertised_prefixes import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import PublicAdvertisedPrefixesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import PublicAdvertisedPrefixesRestTransport + + +class PublicAdvertisedPrefixesClientMeta(type): + """Metaclass for the PublicAdvertisedPrefixes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[PublicAdvertisedPrefixesTransport]] + _transport_registry["rest"] = PublicAdvertisedPrefixesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[PublicAdvertisedPrefixesTransport]: + """Returns an appropriate transport class. 
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PublicAdvertisedPrefixesClient(metaclass=PublicAdvertisedPrefixesClientMeta):
+    """The PublicAdvertisedPrefixes API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PublicAdvertisedPrefixesClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublicAdvertisedPrefixesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PublicAdvertisedPrefixesTransport: + """Returns the transport used by the client instance. + + Returns: + PublicAdvertisedPrefixesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, PublicAdvertisedPrefixesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the public advertised prefixes client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, PublicAdvertisedPrefixesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PublicAdvertisedPrefixesTransport): + # transport is a PublicAdvertisedPrefixesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeletePublicAdvertisedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified PublicAdvertisedPrefix + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.DeletePublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeletePublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix (str): + Name of the PublicAdvertisedPrefix + resource to delete. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, public_advertised_prefix]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeletePublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeletePublicAdvertisedPrefixeRequest): + request = compute.DeletePublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeletePublicAdvertisedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified PublicAdvertisedPrefix + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.DeletePublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeletePublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix (str): + Name of the PublicAdvertisedPrefix + resource to delete. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_advertised_prefix]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeletePublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeletePublicAdvertisedPrefixeRequest): + request = compute.DeletePublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetPublicAdvertisedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.PublicAdvertisedPrefix: + r"""Returns the specified PublicAdvertisedPrefix + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.GetPublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetPublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix (str): + Name of the PublicAdvertisedPrefix + resource to return. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.PublicAdvertisedPrefix: + A public advertised prefix represents + an aggregated IP prefix or netblock + which customers bring to cloud. The IP + prefix is a single unit of route + advertisement and is announced globally + to the internet. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, public_advertised_prefix]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetPublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetPublicAdvertisedPrefixeRequest): + request = compute.GetPublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertPublicAdvertisedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_advertised_prefix_resource: Optional[compute.PublicAdvertisedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a PublicAdvertisedPrefix in the specified + project using the parameters that are included in the + request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.InsertPublicAdvertisedPrefixeRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertPublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix_resource (google.cloud.compute_v1.types.PublicAdvertisedPrefix): + The body resource for this request + This corresponds to the ``public_advertised_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_advertised_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertPublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertPublicAdvertisedPrefixeRequest): + request = compute.InsertPublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix_resource is not None: + request.public_advertised_prefix_resource = public_advertised_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertPublicAdvertisedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_advertised_prefix_resource: Optional[compute.PublicAdvertisedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a PublicAdvertisedPrefix in the specified + project using the parameters that are included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.InsertPublicAdvertisedPrefixeRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertPublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ public_advertised_prefix_resource (google.cloud.compute_v1.types.PublicAdvertisedPrefix): + The body resource for this request + This corresponds to the ``public_advertised_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_advertised_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertPublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertPublicAdvertisedPrefixeRequest): + request = compute.InsertPublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix_resource is not None: + request.public_advertised_prefix_resource = public_advertised_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListPublicAdvertisedPrefixesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists the PublicAdvertisedPrefixes for a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.ListPublicAdvertisedPrefixesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListPublicAdvertisedPrefixesRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.public_advertised_prefixes.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListPublicAdvertisedPrefixesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListPublicAdvertisedPrefixesRequest): + request = compute.ListPublicAdvertisedPrefixesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchPublicAdvertisedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + public_advertised_prefix_resource: Optional[compute.PublicAdvertisedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified Router resource with the data + included in the request. This method supports PATCH + semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchPublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchPublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix (str): + Name of the PublicAdvertisedPrefix + resource to patch. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix_resource (google.cloud.compute_v1.types.PublicAdvertisedPrefix): + The body resource for this request + This corresponds to the ``public_advertised_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_advertised_prefix, public_advertised_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchPublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchPublicAdvertisedPrefixeRequest): + request = compute.PatchPublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + if public_advertised_prefix_resource is not None: + request.public_advertised_prefix_resource = public_advertised_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch(self, + request: Optional[Union[compute.PatchPublicAdvertisedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + public_advertised_prefix: Optional[str] = None, + public_advertised_prefix_resource: Optional[compute.PublicAdvertisedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified Router resource with the data + included in the request. This method supports PATCH + semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchPublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchPublicAdvertisedPrefixeRequest, dict]): + The request object. A request message for + PublicAdvertisedPrefixes.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ public_advertised_prefix (str): + Name of the PublicAdvertisedPrefix + resource to patch. + + This corresponds to the ``public_advertised_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_advertised_prefix_resource (google.cloud.compute_v1.types.PublicAdvertisedPrefix): + The body resource for this request + This corresponds to the ``public_advertised_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, public_advertised_prefix, public_advertised_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchPublicAdvertisedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchPublicAdvertisedPrefixeRequest): + request = compute.PatchPublicAdvertisedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if public_advertised_prefix is not None: + request.public_advertised_prefix = public_advertised_prefix + if public_advertised_prefix_resource is not None: + request.public_advertised_prefix_resource = public_advertised_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("public_advertised_prefix", request.public_advertised_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "PublicAdvertisedPrefixesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PublicAdvertisedPrefixesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/pagers.py new file mode 100644 index 000000000..fe1693548 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.PublicAdvertisedPrefixList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.PublicAdvertisedPrefixList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.PublicAdvertisedPrefixList], + request: compute.ListPublicAdvertisedPrefixesRequest, + response: compute.PublicAdvertisedPrefixList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListPublicAdvertisedPrefixesRequest): + The initial request object. + response (google.cloud.compute_v1.types.PublicAdvertisedPrefixList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListPublicAdvertisedPrefixesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.PublicAdvertisedPrefixList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.PublicAdvertisedPrefix]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/__init__.py new file mode 100644 index 000000000..f61fff805 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PublicAdvertisedPrefixesTransport +from .rest import PublicAdvertisedPrefixesRestTransport +from .rest import PublicAdvertisedPrefixesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PublicAdvertisedPrefixesTransport]] +_transport_registry['rest'] = PublicAdvertisedPrefixesRestTransport + +__all__ = ( + 'PublicAdvertisedPrefixesTransport', + 'PublicAdvertisedPrefixesRestTransport', + 'PublicAdvertisedPrefixesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py new file mode 100644 index 000000000..51c6f1733 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class PublicAdvertisedPrefixesTransport(abc.ABC): + """Abstract transport class for PublicAdvertisedPrefixes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeletePublicAdvertisedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetPublicAdvertisedPrefixeRequest], + Union[ + compute.PublicAdvertisedPrefix, + Awaitable[compute.PublicAdvertisedPrefix] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertPublicAdvertisedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListPublicAdvertisedPrefixesRequest], + Union[ + compute.PublicAdvertisedPrefixList, + Awaitable[compute.PublicAdvertisedPrefixList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchPublicAdvertisedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() 
+ + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'PublicAdvertisedPrefixesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py new file mode 100644 index 000000000..73844ba30 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py @@ -0,0 +1,804 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import PublicAdvertisedPrefixesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PublicAdvertisedPrefixesRestInterceptor: + """Interceptor for PublicAdvertisedPrefixes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PublicAdvertisedPrefixesRestTransport. + + .. 
code-block:: python + class MyCustomPublicAdvertisedPrefixesInterceptor(PublicAdvertisedPrefixesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PublicAdvertisedPrefixesRestTransport(interceptor=MyCustomPublicAdvertisedPrefixesInterceptor()) + client = PublicAdvertisedPrefixesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeletePublicAdvertisedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeletePublicAdvertisedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicAdvertisedPrefixes server. 
+ """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the PublicAdvertisedPrefixes server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetPublicAdvertisedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetPublicAdvertisedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicAdvertisedPrefixes server. + """ + return request, metadata + + def post_get(self, response: compute.PublicAdvertisedPrefix) -> compute.PublicAdvertisedPrefix: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the PublicAdvertisedPrefixes server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertPublicAdvertisedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertPublicAdvertisedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicAdvertisedPrefixes server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the PublicAdvertisedPrefixes server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListPublicAdvertisedPrefixesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListPublicAdvertisedPrefixesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicAdvertisedPrefixes server. + """ + return request, metadata + + def post_list(self, response: compute.PublicAdvertisedPrefixList) -> compute.PublicAdvertisedPrefixList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the PublicAdvertisedPrefixes server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchPublicAdvertisedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchPublicAdvertisedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicAdvertisedPrefixes server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the PublicAdvertisedPrefixes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PublicAdvertisedPrefixesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PublicAdvertisedPrefixesRestInterceptor + + +class PublicAdvertisedPrefixesRestTransport(PublicAdvertisedPrefixesTransport): + """REST backend transport for PublicAdvertisedPrefixes. + + The PublicAdvertisedPrefixes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[PublicAdvertisedPrefixesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST)
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or PublicAdvertisedPrefixesRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _Delete(PublicAdvertisedPrefixesRestStub):
+ def __hash__(self):
+ return hash("Delete")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+ def __call__(self,
+ request: compute.DeletePublicAdvertisedPrefixeRequest, *,
+ retry: OptionalRetry=gapic_v1.method.DEFAULT,
+ timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeletePublicAdvertisedPrefixeRequest): + The request object. A request message for + PublicAdvertisedPrefixes.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeletePublicAdvertisedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(PublicAdvertisedPrefixesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetPublicAdvertisedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PublicAdvertisedPrefix: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetPublicAdvertisedPrefixeRequest): + The request object. A request message for + PublicAdvertisedPrefixes.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.PublicAdvertisedPrefix: + A public advertised prefix represents + an aggregated IP prefix or netblock + which customers bring to cloud. The IP + prefix is a single unit of route + advertisement and is announced globally + to the internet. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetPublicAdvertisedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PublicAdvertisedPrefix() + pb_resp = compute.PublicAdvertisedPrefix.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(PublicAdvertisedPrefixesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertPublicAdvertisedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertPublicAdvertisedPrefixeRequest): + The request object. A request message for + PublicAdvertisedPrefixes.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/publicAdvertisedPrefixes', + 'body': 'public_advertised_prefix_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertPublicAdvertisedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(PublicAdvertisedPrefixesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListPublicAdvertisedPrefixesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PublicAdvertisedPrefixList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListPublicAdvertisedPrefixesRequest): + The request object. A request message for + PublicAdvertisedPrefixes.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.PublicAdvertisedPrefixList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/publicAdvertisedPrefixes', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListPublicAdvertisedPrefixesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PublicAdvertisedPrefixList() + pb_resp = compute.PublicAdvertisedPrefixList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(PublicAdvertisedPrefixesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchPublicAdvertisedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchPublicAdvertisedPrefixeRequest): + The request object. A request message for + PublicAdvertisedPrefixes.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}', + 'body': 'public_advertised_prefix_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchPublicAdvertisedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeletePublicAdvertisedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetPublicAdvertisedPrefixeRequest], + compute.PublicAdvertisedPrefix]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertPublicAdvertisedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListPublicAdvertisedPrefixesRequest], + compute.PublicAdvertisedPrefixList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchPublicAdvertisedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'PublicAdvertisedPrefixesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/__init__.py new file mode 100644 index 000000000..0ed2800f1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import PublicDelegatedPrefixesClient + +__all__ = ( + 'PublicDelegatedPrefixesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/client.py new file mode 100644 index 000000000..7f219edb5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/client.py @@ -0,0 +1,1586 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.public_delegated_prefixes import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import PublicDelegatedPrefixesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import PublicDelegatedPrefixesRestTransport + + +class PublicDelegatedPrefixesClientMeta(type): + """Metaclass for the PublicDelegatedPrefixes client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[PublicDelegatedPrefixesTransport]] + _transport_registry["rest"] = PublicDelegatedPrefixesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[PublicDelegatedPrefixesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PublicDelegatedPrefixesClient(metaclass=PublicDelegatedPrefixesClientMeta): + """The PublicDelegatedPrefixes API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublicDelegatedPrefixesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublicDelegatedPrefixesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PublicDelegatedPrefixesTransport: + """Returns the transport used by the client instance. + + Returns: + PublicDelegatedPrefixesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, PublicDelegatedPrefixesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the public delegated prefixes client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, PublicDelegatedPrefixesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PublicDelegatedPrefixesTransport): + # transport is a PublicDelegatedPrefixesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListPublicDelegatedPrefixesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Lists all PublicDelegatedPrefix resources owned by + the specific project across all scopes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListPublicDelegatedPrefixesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListPublicDelegatedPrefixesRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.AggregatedList. + See the method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.public_delegated_prefixes.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListPublicDelegatedPrefixesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListPublicDelegatedPrefixesRequest): + request = compute.AggregatedListPublicDelegatedPrefixesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_unary(self, + request: Optional[Union[compute.DeletePublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified PublicDelegatedPrefix in the + given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.DeletePublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeletePublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ public_delegated_prefix (str): + Name of the PublicDelegatedPrefix + resource to delete. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeletePublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeletePublicDelegatedPrefixeRequest): + request = compute.DeletePublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeletePublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified PublicDelegatedPrefix in the + given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.DeletePublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeletePublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Delete. See the + method description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix + resource to delete. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeletePublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeletePublicDelegatedPrefixeRequest): + request = compute.DeletePublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetPublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.PublicDelegatedPrefix: + r"""Returns the specified PublicDelegatedPrefix resource + in the given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.GetPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ public_delegated_prefix (str): + Name of the PublicDelegatedPrefix + resource to return. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.PublicDelegatedPrefix: + A PublicDelegatedPrefix resource + represents an IP block within a + PublicAdvertisedPrefix that is + configured within a single cloud scope + (global or region). IPs in the block can + be allocated to resources within that + scope. Public delegated prefixes may be + further broken up into smaller IP blocks + in the same scope as the parent block. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetPublicDelegatedPrefixeRequest): + request = compute.GetPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertPublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix_resource: Optional[compute.PublicDelegatedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a PublicDelegatedPrefix in the specified + project in the given region using the parameters that + are included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.InsertPublicDelegatedPrefixeRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + This corresponds to the ``public_delegated_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertPublicDelegatedPrefixeRequest): + request = compute.InsertPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix_resource is not None: + request.public_delegated_prefix_resource = public_delegated_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertPublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix_resource: Optional[compute.PublicDelegatedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a PublicDelegatedPrefix in the specified + project in the given region using the parameters that + are included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.InsertPublicDelegatedPrefixeRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. 
+ This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + This corresponds to the ``public_delegated_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertPublicDelegatedPrefixeRequest): + request = compute.InsertPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix_resource is not None: + request.public_delegated_prefix_resource = public_delegated_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListPublicDelegatedPrefixesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists the PublicDelegatedPrefixes for a project in + the given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.ListPublicDelegatedPrefixesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.public_delegated_prefixes.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListPublicDelegatedPrefixesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListPublicDelegatedPrefixesRequest): + request = compute.ListPublicDelegatedPrefixesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchPublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + public_delegated_prefix_resource: Optional[compute.PublicDelegatedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified PublicDelegatedPrefix resource + with the data included in the request. This method + supports PATCH semantics and uses JSON merge patch + format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Patch. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix + resource to patch. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + This corresponds to the ``public_delegated_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix, public_delegated_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.PatchPublicDelegatedPrefixeRequest): + request = compute.PatchPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + if public_delegated_prefix_resource is not None: + request.public_delegated_prefix_resource = public_delegated_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchPublicDelegatedPrefixeRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + public_delegated_prefix: Optional[str] = None, + public_delegated_prefix_resource: Optional[compute.PublicDelegatedPrefix] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified PublicDelegatedPrefix resource + with the data included in the request. This method + supports PATCH semantics and uses JSON merge patch + format and processing rules. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest, dict]): + The request object. A request message for + PublicDelegatedPrefixes.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix + resource to patch. + + This corresponds to the ``public_delegated_prefix`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + This corresponds to the ``public_delegated_prefix_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, public_delegated_prefix, public_delegated_prefix_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchPublicDelegatedPrefixeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchPublicDelegatedPrefixeRequest): + request = compute.PatchPublicDelegatedPrefixeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if public_delegated_prefix is not None: + request.public_delegated_prefix = public_delegated_prefix + if public_delegated_prefix_resource is not None: + request.public_delegated_prefix_resource = public_delegated_prefix_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("public_delegated_prefix", request.public_delegated_prefix), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "PublicDelegatedPrefixesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PublicDelegatedPrefixesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/pagers.py new file mode 100644 index 000000000..77a71fe4a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixAggregatedList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.PublicDelegatedPrefixAggregatedList], + request: compute.AggregatedListPublicDelegatedPrefixesRequest, + response: compute.PublicDelegatedPrefixAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListPublicDelegatedPrefixesRequest): + The initial request object. + response (google.cloud.compute_v1.types.PublicDelegatedPrefixAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListPublicDelegatedPrefixesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.PublicDelegatedPrefixAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.PublicDelegatedPrefixesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.PublicDelegatedPrefixesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.PublicDelegatedPrefixList], + request: compute.ListPublicDelegatedPrefixesRequest, + response: compute.PublicDelegatedPrefixList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest): + The initial request object. + response (google.cloud.compute_v1.types.PublicDelegatedPrefixList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListPublicDelegatedPrefixesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.PublicDelegatedPrefixList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.PublicDelegatedPrefix]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/__init__.py new file mode 100644 index 000000000..e607bfff5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PublicDelegatedPrefixesTransport +from .rest import PublicDelegatedPrefixesRestTransport +from .rest import PublicDelegatedPrefixesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PublicDelegatedPrefixesTransport]] +_transport_registry['rest'] = PublicDelegatedPrefixesRestTransport + +__all__ = ( + 'PublicDelegatedPrefixesTransport', + 'PublicDelegatedPrefixesRestTransport', + 'PublicDelegatedPrefixesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py new file mode 100644 index 000000000..27d92dd08 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class PublicDelegatedPrefixesTransport(abc.ABC): + """Abstract transport class for PublicDelegatedPrefixes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListPublicDelegatedPrefixesRequest], + Union[ + compute.PublicDelegatedPrefixAggregatedList, + Awaitable[compute.PublicDelegatedPrefixAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeletePublicDelegatedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetPublicDelegatedPrefixeRequest], + Union[ + compute.PublicDelegatedPrefix, + Awaitable[compute.PublicDelegatedPrefix] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertPublicDelegatedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListPublicDelegatedPrefixesRequest], + Union[ + compute.PublicDelegatedPrefixList, + Awaitable[compute.PublicDelegatedPrefixList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchPublicDelegatedPrefixeRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'PublicDelegatedPrefixesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py new file mode 100644 index 000000000..e5aac3815 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py @@ -0,0 +1,917 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import PublicDelegatedPrefixesTransport, DEFAULT_CLIENT_INFO as 
BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PublicDelegatedPrefixesRestInterceptor: + """Interceptor for PublicDelegatedPrefixes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PublicDelegatedPrefixesRestTransport. + + .. code-block:: python + class MyCustomPublicDelegatedPrefixesInterceptor(PublicDelegatedPrefixesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PublicDelegatedPrefixesRestTransport(interceptor=MyCustomPublicDelegatedPrefixesInterceptor()) + client = PublicDelegatedPrefixesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListPublicDelegatedPrefixesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListPublicDelegatedPrefixesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicDelegatedPrefixes server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.PublicDelegatedPrefixAggregatedList) -> compute.PublicDelegatedPrefixAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the PublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeletePublicDelegatedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeletePublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicDelegatedPrefixes server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the PublicDelegatedPrefixes server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetPublicDelegatedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetPublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicDelegatedPrefixes server. + """ + return request, metadata + + def post_get(self, response: compute.PublicDelegatedPrefix) -> compute.PublicDelegatedPrefix: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the PublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertPublicDelegatedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertPublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicDelegatedPrefixes server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the PublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListPublicDelegatedPrefixesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListPublicDelegatedPrefixesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicDelegatedPrefixes server. 
+ """ + return request, metadata + + def post_list(self, response: compute.PublicDelegatedPrefixList) -> compute.PublicDelegatedPrefixList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the PublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchPublicDelegatedPrefixeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchPublicDelegatedPrefixeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the PublicDelegatedPrefixes server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the PublicDelegatedPrefixes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PublicDelegatedPrefixesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PublicDelegatedPrefixesRestInterceptor + + +class PublicDelegatedPrefixesRestTransport(PublicDelegatedPrefixesTransport): + """REST backend transport for PublicDelegatedPrefixes. + + The PublicDelegatedPrefixes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[PublicDelegatedPrefixesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or PublicDelegatedPrefixesRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AggregatedList(PublicDelegatedPrefixesRestStub):
+        def __hash__(self):
+            return hash("AggregatedList")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.AggregatedListPublicDelegatedPrefixesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.PublicDelegatedPrefixAggregatedList:
+            r"""Call the aggregated list method over HTTP.
+ + Args: + request (~.compute.AggregatedListPublicDelegatedPrefixesRequest): + The request object. A request message for + PublicDelegatedPrefixes.AggregatedList. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.PublicDelegatedPrefixAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/publicDelegatedPrefixes', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListPublicDelegatedPrefixesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PublicDelegatedPrefixAggregatedList() + pb_resp = compute.PublicDelegatedPrefixAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(PublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeletePublicDelegatedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeletePublicDelegatedPrefixeRequest): + The request object. A request message for + PublicDelegatedPrefixes.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeletePublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(PublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PublicDelegatedPrefix: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetPublicDelegatedPrefixeRequest): + The request object. A request message for + PublicDelegatedPrefixes.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.PublicDelegatedPrefix: + A PublicDelegatedPrefix resource + represents an IP block within a + PublicAdvertisedPrefix that is + configured within a single cloud scope + (global or region). IPs in the block can + be allocated to resources within that + scope. Public delegated prefixes may be + further broken up into smaller IP blocks + in the same scope as the parent block. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetPublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PublicDelegatedPrefix() + pb_resp = compute.PublicDelegatedPrefix.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(PublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertPublicDelegatedPrefixeRequest): + The request object. A request message for + PublicDelegatedPrefixes.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes', + 'body': 'public_delegated_prefix_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertPublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(PublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListPublicDelegatedPrefixesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.PublicDelegatedPrefixList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListPublicDelegatedPrefixesRequest): + The request object. A request message for + PublicDelegatedPrefixes.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.PublicDelegatedPrefixList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListPublicDelegatedPrefixesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.PublicDelegatedPrefixList() + pb_resp = compute.PublicDelegatedPrefixList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(PublicDelegatedPrefixesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchPublicDelegatedPrefixeRequest): + The request object. A request message for + PublicDelegatedPrefixes.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}', + 'body': 'public_delegated_prefix_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchPublicDelegatedPrefixeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListPublicDelegatedPrefixesRequest], + compute.PublicDelegatedPrefixAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeletePublicDelegatedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetPublicDelegatedPrefixeRequest], + compute.PublicDelegatedPrefix]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertPublicDelegatedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListPublicDelegatedPrefixesRequest], + compute.PublicDelegatedPrefixList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchPublicDelegatedPrefixeRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'PublicDelegatedPrefixesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/__init__.py new file mode 100644 index 000000000..7afcba570 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import RegionAutoscalersClient + +__all__ = ( + 'RegionAutoscalersClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/client.py new file mode 100644 index 000000000..97231463b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/client.py @@ -0,0 +1,1724 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_autoscalers import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionAutoscalersTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionAutoscalersRestTransport + + +class RegionAutoscalersClientMeta(type): + """Metaclass for the RegionAutoscalers client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionAutoscalersTransport]] + _transport_registry["rest"] = RegionAutoscalersRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionAutoscalersTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionAutoscalersClient(metaclass=RegionAutoscalersClientMeta): + """The RegionAutoscalers API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionAutoscalersClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionAutoscalersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionAutoscalersTransport: + """Returns the transport used by the client instance. + + Returns: + RegionAutoscalersTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionAutoscalersTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region autoscalers client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionAutoscalersTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionAutoscalersTransport): + # transport is a RegionAutoscalersTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + autoscaler: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified autoscaler. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.RegionAutoscalersClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteRegionAutoscalerRequest(
                    autoscaler="autoscaler_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteRegionAutoscalerRequest, dict]):
                The request object. A request message for
                RegionAutoscalers.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            autoscaler (str):
                Name of the autoscaler to delete.
                This corresponds to the ``autoscaler`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, autoscaler])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteRegionAutoscalerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteRegionAutoscalerRequest):
            request = compute.DeleteRegionAutoscalerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if autoscaler is not None:
            request.autoscaler = autoscaler

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("autoscaler", request.autoscaler),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete(self,
            request: Optional[Union[compute.DeleteRegionAutoscalerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            autoscaler: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified autoscaler.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.RegionAutoscalersClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteRegionAutoscalerRequest(
                    autoscaler="autoscaler_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteRegionAutoscalerRequest, dict]):
                The request object. A request message for
                RegionAutoscalers.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            autoscaler (str):
                Name of the autoscaler to delete.
                This corresponds to the ``autoscaler`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, autoscaler])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteRegionAutoscalerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteRegionAutoscalerRequest):
            request = compute.DeleteRegionAutoscalerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if autoscaler is not None:
            request.autoscaler = autoscaler

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("autoscaler", request.autoscaler),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw Operation in an ExtendedOperation that polls the
        # region operations service until the LRO completes.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def get(self,
            request: Optional[Union[compute.GetRegionAutoscalerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            autoscaler: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Autoscaler:
        r"""Returns the specified autoscaler.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.RegionAutoscalersClient()

                # Initialize request argument(s)
                request = compute_v1.GetRegionAutoscalerRequest(
                    autoscaler="autoscaler_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetRegionAutoscalerRequest, dict]):
                The request object. A request message for
                RegionAutoscalers.Get. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            autoscaler (str):
                Name of the autoscaler to return.
                This corresponds to the ``autoscaler`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Autoscaler:
                Represents an Autoscaler resource. Google Compute Engine
                has two Autoscaler resources: \*
                [Zonal](/compute/docs/reference/rest/v1/autoscalers) \*
                [Regional](/compute/docs/reference/rest/v1/regionAutoscalers)
                Use autoscalers to automatically add or delete instances
                from a managed instance group according to your defined
                autoscaling policy. For more information, read
                Autoscaling Groups of Instances. For zonal managed
                instance groups resource, use the autoscaler resource.
                For regional managed instance groups, use the
                regionAutoscalers resource.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, autoscaler])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetRegionAutoscalerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetRegionAutoscalerRequest):
            request = compute.GetRegionAutoscalerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if autoscaler is not None:
            request.autoscaler = autoscaler

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("autoscaler", request.autoscaler),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert_unary(self,
            request: Optional[Union[compute.InsertRegionAutoscalerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            autoscaler_resource: Optional[compute.Autoscaler] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        # NOTE(review): returns the raw compute.Operation without polling;
        # insert() below wraps it in an ExtendedOperation.
        r"""Creates an autoscaler in the specified project using
        the data included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.RegionAutoscalersClient()

                # Initialize request argument(s)
                request = compute_v1.InsertRegionAutoscalerRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionAutoscalerRequest, dict]):
                The request object. A request message for
                RegionAutoscalers.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            autoscaler_resource (google.cloud.compute_v1.types.Autoscaler):
                The body resource for this request
                This corresponds to the ``autoscaler_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, autoscaler_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertRegionAutoscalerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertRegionAutoscalerRequest):
            request = compute.InsertRegionAutoscalerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if autoscaler_resource is not None:
            request.autoscaler_resource = autoscaler_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here. (Only project/region route this RPC; the body
        # resource is not part of the routing header.)
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert(self,
            request: Optional[Union[compute.InsertRegionAutoscalerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            autoscaler_resource: Optional[compute.Autoscaler] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Creates an autoscaler in the specified project using
        the data included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.RegionAutoscalersClient()

                # Initialize request argument(s)
                request = compute_v1.InsertRegionAutoscalerRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionAutoscalerRequest, dict]):
                The request object. A request message for
                RegionAutoscalers.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            autoscaler_resource (google.cloud.compute_v1.types.Autoscaler):
                The body resource for this request
                This corresponds to the ``autoscaler_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, autoscaler_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertRegionAutoscalerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertRegionAutoscalerRequest):
            request = compute.InsertRegionAutoscalerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if autoscaler_resource is not None:
            request.autoscaler_resource = autoscaler_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw Operation in an ExtendedOperation that polls the
        # region operations service until the LRO completes.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def list(self,
            request: Optional[Union[compute.ListRegionAutoscalersRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListPager:
        r"""Retrieves a list of autoscalers contained within the
        specified region.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_list():
                # Create a client
                client = compute_v1.RegionAutoscalersClient()

                # Initialize request argument(s)
                request = compute_v1.ListRegionAutoscalersRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                page_result = client.list(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.ListRegionAutoscalersRequest, dict]):
                The request object. A request message for
                RegionAutoscalers.List. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.services.region_autoscalers.pagers.ListPager:
                Contains a list of autoscalers.

                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.ListRegionAutoscalersRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.ListRegionAutoscalersRequest):
            request = compute.ListRegionAutoscalersRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def patch_unary(self,
            request: Optional[Union[compute.PatchRegionAutoscalerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            autoscaler_resource: Optional[compute.Autoscaler] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        # NOTE(review): returns the raw compute.Operation without polling;
        # patch() below wraps it in an ExtendedOperation.
        r"""Updates an autoscaler in the specified project using
        the data included in the request. This method supports
        PATCH semantics and uses the JSON merge patch format and
        processing rules.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.RegionAutoscalersClient()

                # Initialize request argument(s)
                request = compute_v1.PatchRegionAutoscalerRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchRegionAutoscalerRequest, dict]):
                The request object. A request message for
                RegionAutoscalers.Patch. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            autoscaler_resource (google.cloud.compute_v1.types.Autoscaler):
                The body resource for this request
                This corresponds to the ``autoscaler_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, autoscaler_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchRegionAutoscalerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchRegionAutoscalerRequest):
            request = compute.PatchRegionAutoscalerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if autoscaler_resource is not None:
            request.autoscaler_resource = autoscaler_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def patch(self,
            request: Optional[Union[compute.PatchRegionAutoscalerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            autoscaler_resource: Optional[compute.Autoscaler] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Updates an autoscaler in the specified project using
        the data included in the request. This method supports
        PATCH semantics and uses the JSON merge patch format and
        processing rules.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.RegionAutoscalersClient()

                # Initialize request argument(s)
                request = compute_v1.PatchRegionAutoscalerRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchRegionAutoscalerRequest, dict]):
                The request object. A request message for
                RegionAutoscalers.Patch. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            autoscaler_resource (google.cloud.compute_v1.types.Autoscaler):
                The body resource for this request
                This corresponds to the ``autoscaler_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, autoscaler_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchRegionAutoscalerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchRegionAutoscalerRequest):
            request = compute.PatchRegionAutoscalerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if autoscaler_resource is not None:
            request.autoscaler_resource = autoscaler_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateRegionAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + autoscaler_resource: Optional[compute.Autoscaler] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates an autoscaler in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionAutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionAutoscalerRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionAutoscalerRequest, dict]): + The request object. A request message for + RegionAutoscalers.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + This corresponds to the ``autoscaler_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, autoscaler_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionAutoscalerRequest): + request = compute.UpdateRegionAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if autoscaler_resource is not None: + request.autoscaler_resource = autoscaler_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateRegionAutoscalerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + autoscaler_resource: Optional[compute.Autoscaler] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates an autoscaler in the specified project using + the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionAutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionAutoscalerRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionAutoscalerRequest, dict]): + The request object. A request message for + RegionAutoscalers.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + This corresponds to the ``autoscaler_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, autoscaler_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionAutoscalerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionAutoscalerRequest): + request = compute.UpdateRegionAutoscalerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if autoscaler_resource is not None: + request.autoscaler_resource = autoscaler_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionAutoscalersClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionAutoscalersClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/pagers.py new file mode 100644 index 000000000..4fa585f61 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RegionAutoscalerList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RegionAutoscalerList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.RegionAutoscalerList], + request: compute.ListRegionAutoscalersRequest, + response: compute.RegionAutoscalerList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionAutoscalersRequest): + The initial request object. + response (google.cloud.compute_v1.types.RegionAutoscalerList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionAutoscalersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RegionAutoscalerList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Autoscaler]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/__init__.py new file mode 100644 index 000000000..9173ddbac --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import RegionAutoscalersTransport
+from .rest import RegionAutoscalersRestTransport
+from .rest import RegionAutoscalersRestInterceptor
+
+
+# Compile a registry of transports.
+# Maps the transport "kind" string (as passed to the client constructor) to
+# the concrete transport class; RegionAutoscalers only supports REST.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[RegionAutoscalersTransport]]
+_transport_registry['rest'] = RegionAutoscalersRestTransport
+
+__all__ = (
+    'RegionAutoscalersTransport',
+    'RegionAutoscalersRestTransport',
+    'RegionAutoscalersRestInterceptor',
+)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/base.py
new file mode 100644
index 000000000..088af1702
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/base.py
@@ -0,0 +1,233 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionAutoscalersTransport(abc.ABC): + """Abstract transport class for RegionAutoscalers.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        # Each RPC is wrapped with retry/timeout/error handling once, here,
+        # rather than per call. No default timeout or retry policy is
+        # configured for this service; callers supply them per request.
+        self._wrapped_methods = {
+            self.delete: gapic_v1.method.wrap_method(
+                self.delete,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.get: gapic_v1.method.wrap_method(
+                self.get,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.insert: gapic_v1.method.wrap_method(
+                self.insert,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.list: gapic_v1.method.wrap_method(
+                self.list,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.patch: gapic_v1.method.wrap_method(
+                self.patch,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.update: gapic_v1.method.wrap_method(
+                self.update,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+             Only call this method if the transport is NOT shared
+             with other clients - this may cause errors in other clients!
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionAutoscalerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionAutoscalerRequest], + Union[ + compute.Autoscaler, + Awaitable[compute.Autoscaler] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionAutoscalerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionAutoscalersRequest], + Union[ + compute.RegionAutoscalerList, + Awaitable[compute.RegionAutoscalerList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionAutoscalerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateRegionAutoscalerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionAutoscalersTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py new file mode 100644 index 000000000..a26104868 --- /dev/null +++ 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py @@ -0,0 +1,944 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionAutoscalersTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, 
+) + + +class RegionAutoscalersRestInterceptor: + """Interceptor for RegionAutoscalers. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionAutoscalersRestTransport. + + .. code-block:: python + class MyCustomRegionAutoscalersInterceptor(RegionAutoscalersRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
RegionAutoscalersRestTransport(interceptor=MyCustomRegionAutoscalersInterceptor()) + client = RegionAutoscalersClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionAutoscalerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionAutoscalers server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionAutoscalers server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionAutoscalerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionAutoscalers server. + """ + return request, metadata + + def post_get(self, response: compute.Autoscaler) -> compute.Autoscaler: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionAutoscalers server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionAutoscalerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionAutoscalers server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionAutoscalers server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionAutoscalersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionAutoscalersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionAutoscalers server. + """ + return request, metadata + + def post_list(self, response: compute.RegionAutoscalerList) -> compute.RegionAutoscalerList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionAutoscalers server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRegionAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionAutoscalerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionAutoscalers server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionAutoscalers server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateRegionAutoscalerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateRegionAutoscalerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionAutoscalers server. 
+ """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the RegionAutoscalers server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionAutoscalersRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionAutoscalersRestInterceptor + + +class RegionAutoscalersRestTransport(RegionAutoscalersTransport): + """REST backend transport for RegionAutoscalers. + + The RegionAutoscalers API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionAutoscalersRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionAutoscalersRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionAutoscalersRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionAutoscalerRequest): + The request object. A request message for + RegionAutoscalers.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionAutoscalersRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Autoscaler: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionAutoscalerRequest): + The request object. A request message for + RegionAutoscalers.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Autoscaler: + Represents an Autoscaler resource. Google Compute Engine + has two Autoscaler resources: \* + `Zonal `__ + \* + `Regional `__ + Use autoscalers to automatically add or delete instances + from a managed instance group according to your defined + autoscaling policy. For more information, read + Autoscaling Groups of Instances. For zonal managed + instance groups resource, use the autoscaler resource. + For regional managed instance groups, use the + regionAutoscalers resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Autoscaler() + pb_resp = compute.Autoscaler.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionAutoscalersRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionAutoscalerRequest): + The request object. A request message for + RegionAutoscalers.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/autoscalers', + 'body': 'autoscaler_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionAutoscalersRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionAutoscalersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionAutoscalerList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionAutoscalersRequest): + The request object. A request message for + RegionAutoscalers.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.RegionAutoscalerList: + Contains a list of autoscalers. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/autoscalers', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionAutoscalersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionAutoscalerList() + pb_resp = compute.RegionAutoscalerList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RegionAutoscalersRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionAutoscalerRequest): + The request object. A request message for + RegionAutoscalers.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/autoscalers', + 'body': 'autoscaler_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _Update(RegionAutoscalersRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateRegionAutoscalerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateRegionAutoscalerRequest): + The request object. A request message for + RegionAutoscalers.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/regions/{region}/autoscalers', + 'body': 'autoscaler_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateRegionAutoscalerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionAutoscalerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionAutoscalerRequest], + compute.Autoscaler]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionAutoscalerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionAutoscalersRequest], + compute.RegionAutoscalerList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionAutoscalerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateRegionAutoscalerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionAutoscalersRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/__init__.py new file mode 100644 index 000000000..fd05426ff --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionBackendServicesClient + +__all__ = ( + 'RegionBackendServicesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/client.py new file mode 100644 index 000000000..5e5debf28 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/client.py @@ -0,0 +1,2259 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_backend_services import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionBackendServicesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionBackendServicesRestTransport + + +class RegionBackendServicesClientMeta(type): + """Metaclass for the RegionBackendServices client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
class RegionBackendServicesClientMeta(type):
    # NOTE(review): the class header and the opening of this docstring were
    # truncated in the source view; reconstructed from the standard GAPIC
    # generated template — confirm against the generator output.
    """Metaclass for the RegionBackendServices client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[RegionBackendServicesTransport]]
    _transport_registry["rest"] = RegionBackendServicesRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[RegionBackendServicesTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class RegionBackendServicesClient(metaclass=RegionBackendServicesClientMeta):
    """The RegionBackendServices API."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com"
        respectively.

        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # The named groups below were stripped to "(?P[^.]+)" etc. in the
        # source (extraction removed the angle-bracketed names, leaving an
        # invalid pattern); restored to the canonical generated regex that
        # the m.groups() unpacking below depends on.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "compute.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            RegionBackendServicesClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            RegionBackendServicesClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    from_service_account_json = from_service_account_file

    @property
    def transport(self) -> RegionBackendServicesTransport:
        """Returns the transport used by the client instance.

        Returns:
            RegionBackendServicesTransport: The transport used by the client
                instance.
        """
        return self._transport

    @staticmethod
    def common_billing_account_path(billing_account: str, ) -> str:
        """Returns a fully-qualified billing_account string."""
        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )

    @staticmethod
    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
        """Parse a billing_account path into its component segments."""
        # Named group restored (source had the garbled "(?P.+?)").
        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_folder_path(folder: str, ) -> str:
        """Returns a fully-qualified folder string."""
        return "folders/{folder}".format(folder=folder, )

    @staticmethod
    def parse_common_folder_path(path: str) -> Dict[str, str]:
        """Parse a folder path into its component segments."""
        m = re.match(r"^folders/(?P<folder>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_organization_path(organization: str, ) -> str:
        """Returns a fully-qualified organization string."""
        return "organizations/{organization}".format(organization=organization, )

    @staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parse an organization path into its component segments."""
        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_project_path(project: str, ) -> str:
        """Returns a fully-qualified project string."""
        return "projects/{project}".format(project=project, )

    @staticmethod
    def parse_common_project_path(path: str) -> Dict[str, str]:
        """Parse a project path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_location_path(project: str, location: str, ) -> str:
        """Returns a fully-qualified location string."""
        return "projects/{project}/locations/{location}".format(project=project, location=location, )

    @staticmethod
    def parse_common_location_path(path: str) -> Dict[str, str]:
        """Parse a location path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}

    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint,
        otherwise use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, RegionBackendServicesTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the region backend services client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, RegionBackendServicesTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
                NOTE: "rest" transport functionality is currently in a
                beta state (preview). We welcome your feedback via an
                issue in this library's source repository.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client. It won't take effect if a
                ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client; the
                GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also
                override the endpoint ("always" / "never" / "auto", the
                default). The ``api_endpoint`` property takes precedence.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE is "true", the
                ``client_cert_source`` property can provide a client
                certificate for mutual TLS transport; otherwise the default
                SSL client certificate is used if present.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, RegionBackendServicesTransport):
            # transport is a RegionBackendServicesTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def delete_unary(self,
            request: Optional[Union[compute.DeleteRegionBackendServiceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            backend_service: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Deletes the specified regional BackendService resource.

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteRegionBackendServiceRequest, dict]):
                The request object. A request message for
                RegionBackendServices.Delete. See the method description for details.
            project (str):
                Project ID for this request. This corresponds to the ``project``
                field on the ``request`` instance; if ``request`` is provided,
                this should not be set.
            region (str):
                Name of the region scoping this request. Same flattening rule.
            backend_service (str):
                Name of the BackendService resource to delete. Same flattening rule.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended long-running operation.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, backend_service])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteRegionBackendServiceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteRegionBackendServiceRequest):
            request = compute.DeleteRegionBackendServiceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if backend_service is not None:
                request.backend_service = backend_service

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("backend_service", request.backend_service),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete(self,
            request: Optional[Union[compute.DeleteRegionBackendServiceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            backend_service: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified regional BackendService resource.

        Unlike :meth:`delete_unary`, the returned operation is wrapped so it
        can be polled via the region operations service.

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteRegionBackendServiceRequest, dict]):
                The request object. A request message for
                RegionBackendServices.Delete. See the method description for details.
            project (str):
                Project ID for this request. This corresponds to the ``project``
                field on the ``request`` instance; if ``request`` is provided,
                this should not be set.
            region (str):
                Name of the region scoping this request. Same flattening rule.
            backend_service (str):
                Name of the BackendService resource to delete. Same flattening rule.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended long-running operation.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, backend_service])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteRegionBackendServiceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteRegionBackendServiceRequest):
            request = compute.DeleteRegionBackendServiceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if backend_service is not None:
                request.backend_service = backend_service

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("backend_service", request.backend_service),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def get(self,
            request: Optional[Union[compute.GetRegionBackendServiceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            backend_service: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.BackendService:
        r"""Returns the specified regional BackendService resource.

        Args:
            request (Union[google.cloud.compute_v1.types.GetRegionBackendServiceRequest, dict]):
                The request object. A request message for
                RegionBackendServices.Get. See the method description for details.
            project (str):
                Project ID for this request. This corresponds to the ``project``
                field on the ``request`` instance; if ``request`` is provided,
                this should not be set.
            region (str):
                Name of the region scoping this request. Same flattening rule.
            backend_service (str):
                Name of the BackendService resource to return. Same flattening rule.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.BackendService:
                Represents a Backend Service resource. A backend service
                defines how Google Cloud load balancers distribute traffic.
                For more information, see Backend Services.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, backend_service])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetRegionBackendServiceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetRegionBackendServiceRequest):
            request = compute.GetRegionBackendServiceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if backend_service is not None:
                request.backend_service = backend_service

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("backend_service", request.backend_service),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def get_health(self,
            request: Optional[Union[compute.GetHealthRegionBackendServiceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            backend_service: Optional[str] = None,
            resource_group_reference_resource: Optional[compute.ResourceGroupReference] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.BackendServiceGroupHealth:
        r"""Gets the most recent health check results for this
        regional BackendService.

        Args:
            request (Union[google.cloud.compute_v1.types.GetHealthRegionBackendServiceRequest, dict]):
                The request object. A request message for
                RegionBackendServices.GetHealth. See the method description for details.
            project (str):
                This corresponds to the ``project`` field on the ``request``
                instance; if ``request`` is provided, this should not be set.
            region (str):
                Name of the region scoping this request. Same flattening rule.
            backend_service (str):
                Name of the BackendService resource for which to get health.
                Same flattening rule.
            resource_group_reference_resource (google.cloud.compute_v1.types.ResourceGroupReference):
                The body resource for this request. Same flattening rule.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.BackendServiceGroupHealth:
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, backend_service, resource_group_reference_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetHealthRegionBackendServiceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetHealthRegionBackendServiceRequest):
            request = compute.GetHealthRegionBackendServiceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if backend_service is not None:
                request.backend_service = backend_service
            if resource_group_reference_resource is not None:
                request.resource_group_reference_resource = resource_group_reference_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_health]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("backend_service", request.backend_service),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def get_iam_policy(self,
            request: Optional[Union[compute.GetIamPolicyRegionBackendServiceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            resource: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Policy:
        r"""Gets the access control policy for a resource. May be
        empty if no such policy or resource exists.

        Args:
            request (Union[google.cloud.compute_v1.types.GetIamPolicyRegionBackendServiceRequest, dict]):
                The request object. A request message for
                RegionBackendServices.GetIamPolicy. See the method description for details.
            project (str):
                Project ID for this request. This corresponds to the ``project``
                field on the ``request`` instance; if ``request`` is provided,
                this should not be set.
            region (str):
                The name of the region for this request. Same flattening rule.
            resource (str):
                Name or id of the resource for this request. Same flattening rule.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Policy:
                An Identity and Access Management (IAM) policy, which
                specifies access controls for Google Cloud resources. A
                Policy is a collection of bindings; a binding binds one or
                more members (principals) to a single role, optionally
                gated by a condition. See the IAM documentation at
                https://cloud.google.com/iam/docs/ for details and examples.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetIamPolicyRegionBackendServiceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetIamPolicyRegionBackendServiceRequest):
            request = compute.GetIamPolicyRegionBackendServiceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if resource is not None:
                request.resource = resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("resource", request.resource),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert_unary(self,
            request: Optional[Union[compute.InsertRegionBackendServiceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            backend_service_resource: Optional[compute.BackendService] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        # NOTE(review): this method was truncated at the end of the source
        # view (mid-docstring); the remainder is reconstructed from the
        # generator template used by the sibling methods above — confirm
        # against the full generated file.
        r"""Creates a regional BackendService resource in the
        specified project using the data included in the
        request. For more information, see Backend services
        overview.

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionBackendServiceRequest, dict]):
                The request object. A request message for
                RegionBackendServices.Insert. See the method description for details.
            project (str):
                Project ID for this request. This corresponds to the ``project``
                field on the ``request`` instance; if ``request`` is provided,
                this should not be set.
            region (str):
                Name of the region scoping this request. Same flattening rule.
            backend_service_resource (google.cloud.compute_v1.types.BackendService):
                The body resource for this request. Same flattening rule.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended long-running operation.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, backend_service_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertRegionBackendServiceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertRegionBackendServiceRequest):
            request = compute.InsertRegionBackendServiceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if backend_service_resource is not None:
                request.backend_service_resource = backend_service_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionBackendServiceRequest): + request = compute.InsertRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a regional BackendService resource in the + specified project using the data included in the + request. For more information, see Backend services + overview. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionBackendServiceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionBackendServiceRequest, dict]): + The request object. A request message for + RegionBackendServices.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionBackendServiceRequest): + request = compute.InsertRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionBackendServicesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of regional BackendService + resources available to the specified project in the + given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionBackendServicesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionBackendServicesRequest, dict]): + The request object. 
A request message for + RegionBackendServices.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_backend_services.pagers.ListPager: + Contains a list of BackendService + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionBackendServicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionBackendServicesRequest): + request = compute.ListRegionBackendServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchRegionBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_service: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified regional BackendService + resource with the data included in the request. For more + information, see Understanding backend services This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionBackendServiceRequest, dict]): + The request object. A request message for + RegionBackendServices.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to patch. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, backend_service, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionBackendServiceRequest): + request = compute.PatchRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_service is not None: + request.backend_service = backend_service + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch(self, + request: Optional[Union[compute.PatchRegionBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_service: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified regional BackendService + resource with the data included in the request. For more + information, see Understanding backend services This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionBackendServiceRequest, dict]): + The request object. A request message for + RegionBackendServices.Patch. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to patch. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, backend_service, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.PatchRegionBackendServiceRequest): + request = compute.PatchRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_service is not None: + request.backend_service = backend_service + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyRegionBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[compute.RegionSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionBackendServiceRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyRegionBackendServiceRequest, dict]): + The request object. 
A request message for + RegionBackendServices.SetIamPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. 
To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyRegionBackendServiceRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicyRegionBackendServiceRequest): + request = compute.SetIamPolicyRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = region_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateRegionBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_service: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified regional BackendService + resource with the data included in the request. For more + information, see Backend services overview . + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionBackendServiceRequest, dict]): + The request object. A request message for + RegionBackendServices.Update. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to update. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, backend_service, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionBackendServiceRequest): + request = compute.UpdateRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_service is not None: + request.backend_service = backend_service + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateRegionBackendServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_service: Optional[str] = None, + backend_service_resource: Optional[compute.BackendService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified regional BackendService + resource with the data included in the request. For more + information, see Backend services overview . + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionBackendServiceRequest, dict]): + The request object. 
A request message for + RegionBackendServices.Update. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service (str): + Name of the BackendService resource + to update. + + This corresponds to the ``backend_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + This corresponds to the ``backend_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, backend_service, backend_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionBackendServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.UpdateRegionBackendServiceRequest): + request = compute.UpdateRegionBackendServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_service is not None: + request.backend_service = backend_service + if backend_service_resource is not None: + request.backend_service_resource = backend_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("backend_service", request.backend_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionBackendServicesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionBackendServicesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/pagers.py new file mode 100644 index 000000000..fce80805a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendServiceList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.BackendServiceList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.BackendServiceList], + request: compute.ListRegionBackendServicesRequest, + response: compute.BackendServiceList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionBackendServicesRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendServiceList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListRegionBackendServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendServiceList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.BackendService]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/__init__.py new file mode 100644 index 000000000..6dad8661b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionBackendServicesTransport +from .rest import RegionBackendServicesRestTransport +from .rest import RegionBackendServicesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionBackendServicesTransport]] +_transport_registry['rest'] = RegionBackendServicesRestTransport + +__all__ = ( + 'RegionBackendServicesTransport', + 'RegionBackendServicesRestTransport', + 'RegionBackendServicesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/base.py new file mode 100644 index 000000000..1e01e15c8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionBackendServicesTransport(abc.ABC): + """Abstract transport class for RegionBackendServices.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_health: gapic_v1.method.wrap_method( + self.get_health, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionBackendServiceRequest], + Union[ + compute.BackendService, + Awaitable[compute.BackendService] + ]]: + raise NotImplementedError() + + @property + def get_health(self) -> Callable[ + [compute.GetHealthRegionBackendServiceRequest], + Union[ + compute.BackendServiceGroupHealth, + Awaitable[compute.BackendServiceGroupHealth] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyRegionBackendServiceRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionBackendServicesRequest], + Union[ + compute.BackendServiceList, + Awaitable[compute.BackendServiceList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyRegionBackendServiceRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateRegionBackendServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service 
= self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionBackendServicesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/rest.py new file mode 100644 index 000000000..bff61d72f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_backend_services/transports/rest.py @@ -0,0 +1,1379 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionBackendServicesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionBackendServicesRestInterceptor: + """Interceptor for RegionBackendServices. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionBackendServicesRestTransport. + + .. 
code-block:: python + class MyCustomRegionBackendServicesInterceptor(RegionBackendServicesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_health(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_health(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionBackendServicesRestTransport(interceptor=MyCustomRegionBackendServicesInterceptor()) + client = RegionBackendServicesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_get(self, response: compute.BackendService) -> compute.BackendService: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. 
+ """ + return response + def pre_get_health(self, request: compute.GetHealthRegionBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetHealthRegionBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_health + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_get_health(self, response: compute.BackendServiceGroupHealth) -> compute.BackendServiceGroupHealth: + """Post-rpc interceptor for get_health + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyRegionBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyRegionBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionBackendServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionBackendServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_list(self, response: compute.BackendServiceList) -> compute.BackendServiceList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRegionBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyRegionBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyRegionBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateRegionBackendServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateRegionBackendServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendServices server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the RegionBackendServices server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionBackendServicesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionBackendServicesRestInterceptor + + +class RegionBackendServicesRestTransport(RegionBackendServicesTransport): + """REST backend transport for RegionBackendServices. + + The RegionBackendServices API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionBackendServicesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionBackendServicesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionBackendServicesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionBackendServiceRequest): + The request object. A request message for + RegionBackendServices.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionBackendServicesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendService: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionBackendServiceRequest): + The request object. A request message for + RegionBackendServices.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendService: + Represents a Backend Service resource. A backend service + defines how Google Cloud load balancers distribute + traffic. The backend service configuration contains a + set of values, such as the protocol used to connect to + backends, various distribution and session settings, + health checks, and timeouts. These settings provide + fine-grained control over how your load balancer + behaves. Most of the settings have default values that + allow for easy configuration if you need to get started + quickly. Backend services in Google Compute Engine can + be either regionally or globally scoped. 
\* + `Global `__ + \* + `Regional `__ + For more information, see Backend Services. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendService() + pb_resp = compute.BackendService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetHealth(RegionBackendServicesRestStub): + def __hash__(self): + return hash("GetHealth") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetHealthRegionBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendServiceGroupHealth: + r"""Call the get health method over HTTP. + + Args: + request (~.compute.GetHealthRegionBackendServiceRequest): + The request object. A request message for + RegionBackendServices.GetHealth. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.BackendServiceGroupHealth: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/getHealth', + 'body': 'resource_group_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_get_health(request, metadata) + pb_request = compute.GetHealthRegionBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendServiceGroupHealth() + pb_resp = compute.BackendServiceGroupHealth.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_health(resp) + return resp + + class _GetIamPolicy(RegionBackendServicesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyRegionBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyRegionBackendServiceRequest): + The request object. A request message for + RegionBackendServices.GetIamPolicy. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyRegionBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(RegionBackendServicesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionBackendServiceRequest): + The request object. A request message for + RegionBackendServices.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices', + 'body': 'backend_service_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionBackendServicesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionBackendServicesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.BackendServiceList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionBackendServicesRequest): + The request object. A request message for + RegionBackendServices.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.BackendServiceList: + Contains a list of BackendService + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionBackendServicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendServiceList() + pb_resp = compute.BackendServiceList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RegionBackendServicesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionBackendServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionBackendServiceRequest): + The request object. A request message for + RegionBackendServices.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}', + 'body': 'backend_service_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
class _SetIamPolicy(RegionBackendServicesRestStub):
    """REST stub for the RegionBackendServices.SetIamPolicy RPC."""

    def __hash__(self):
        return hash("SetIamPolicy")

    # Required query parameters that carry proto3 default values, keyed by
    # field name.  Empty for this RPC; kept for generator uniformity.
    __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

    @classmethod
    def _get_unset_required_fields(cls, message_dict):
        defaults = cls.__REQUIRED_FIELDS_DEFAULT_VALUES
        return {field: value for field, value in defaults.items() if field not in message_dict}

    def __call__(self,
            request: compute.SetIamPolicyRegionBackendServiceRequest, *,
            retry: OptionalRetry=gapic_v1.method.DEFAULT,
            timeout: Optional[float]=None,
            metadata: Sequence[Tuple[str, str]]=(),
            ) -> compute.Policy:
        r"""Call the set iam policy method over HTTP.

        Args:
            request (~.compute.SetIamPolicyRegionBackendServiceRequest):
                The request object. A request message for
                RegionBackendServices.SetIamPolicy. See the method
                description for details.
            retry (google.api_core.retry.Retry): Designation of what
                errors, if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            ~.compute.Policy:
                An Identity and Access Management (IAM) policy: a
                collection of ``bindings`` that bind one or more
                principals (user accounts, service accounts, groups,
                domains) to a single role, optionally gated by a
                ``condition`` expression.
        """
        http_options: List[Dict[str, str]] = [{
            'method': 'post',
            'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices/{resource}/setIamPolicy',
            'body': 'region_set_policy_request_resource',
        },
        ]
        request, metadata = self._interceptor.pre_set_iam_policy(request, metadata)
        pb_request = compute.SetIamPolicyRegionBackendServiceRequest.pb(request)
        transcoded = path_template.transcode(http_options, pb_request)

        # Serialize the transcoded body and query params for the HTTP/JSON
        # call.
        json_body = json_format.MessageToJson(
            transcoded['body'],
            including_default_value_fields=False,
            use_integers_for_enums=False
        )
        query_params = json.loads(json_format.MessageToJson(
            transcoded['query_params'],
            including_default_value_fields=False,
            use_integers_for_enums=False,
        ))
        query_params.update(self._get_unset_required_fields(query_params))

        # Issue the request via the session method named by the transcoder.
        headers = dict(metadata)
        headers['Content-Type'] = 'application/json'
        send = getattr(self._session, transcoded['method'])
        response = send(
            "{host}{uri}".format(host=self._host, uri=transcoded['uri']),
            timeout=timeout,
            headers=headers,
            params=rest_helpers.flatten_query_params(query_params, strict=True),
            data=json_body,
        )

        # Raise the appropriate core_exceptions.GoogleAPICallError subclass
        # on any HTTP error status.
        if response.status_code >= 400:
            raise core_exceptions.from_http_response(response)

        # Deserialize the response into a Policy and run the interceptor.
        resp = compute.Policy()
        pb_resp = compute.Policy.pb(resp)
        json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
        resp = self._interceptor.post_set_iam_policy(resp)
        return resp
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}', + 'body': 'backend_service_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateRegionBackendServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
@property
def get_iam_policy(self) -> Callable[
        [compute.GetIamPolicyRegionBackendServiceRequest],
        compute.Policy]:
    # Expose the GetIamPolicy stub as a plain callable.  The cast is safe
    # at runtime (_GetIamPolicy.__call__ has the advertised signature);
    # mypy just cannot see through the stub class, hence the ignore.
    return self._GetIamPolicy(self._session, self._host, self._interceptor)  # type: ignore
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyRegionBackendServiceRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateRegionBackendServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionBackendServicesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/__init__.py new file mode 100644 index 000000000..922ce6290 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionCommitmentsClient + +__all__ = ( + 'RegionCommitmentsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/client.py new file mode 100644 index 000000000..df4df06ac --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/client.py @@ -0,0 +1,1310 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_commitments import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionCommitmentsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionCommitmentsRestTransport + + +class RegionCommitmentsClientMeta(type): + """Metaclass for the RegionCommitments client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionCommitmentsTransport]] + _transport_registry["rest"] = RegionCommitmentsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionCommitmentsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionCommitmentsClient(metaclass=RegionCommitmentsClientMeta): + """The RegionCommitments API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionCommitmentsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionCommitmentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionCommitmentsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionCommitmentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionCommitmentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region commitments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionCommitmentsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionCommitmentsTransport): + # transport is a RegionCommitmentsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListRegionCommitmentsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of commitments by + region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListRegionCommitmentsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListRegionCommitmentsRequest, dict]): + The request object. A request message for + RegionCommitments.AggregatedList. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_commitments.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListRegionCommitmentsRequest. 
def get(self,
        request: Optional[Union[compute.GetRegionCommitmentRequest, dict]] = None,
        *,
        project: Optional[str] = None,
        region: Optional[str] = None,
        commitment: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> compute.Commitment:
    r"""Returns the specified commitment resource.

    .. code-block:: python

        # This snippet has been automatically generated and should be regarded as a
        # code template only.
        # It will require modifications to work:
        # - It may require correct/in-range values for request initialization.
        # - It may require specifying regional endpoints when creating the service
        #   client as shown in:
        #   https://googleapis.dev/python/google-api-core/latest/client_options.html
        from google.cloud import compute_v1

        def sample_get():
            # Create a client
            client = compute_v1.RegionCommitmentsClient()

            # Initialize request argument(s)
            request = compute_v1.GetRegionCommitmentRequest(
                commitment="commitment_value",
                project="project_value",
                region="region_value",
            )

            # Make the request
            response = client.get(request=request)

            # Handle the response
            print(response)

    Args:
        request (Union[google.cloud.compute_v1.types.GetRegionCommitmentRequest, dict]):
            The request object. A request message for
            RegionCommitments.Get. See the method description for
            details.
        project (str):
            Project ID for this request. This corresponds to the
            ``project`` field on the ``request`` instance; if
            ``request`` is provided, this should not be set.
        region (str):
            Name of the region for this request. This corresponds to
            the ``region`` field on the ``request`` instance; if
            ``request`` is provided, this should not be set.
        commitment (str):
            Name of the commitment to return. This corresponds to the
            ``commitment`` field on the ``request`` instance; if
            ``request`` is provided, this should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.compute_v1.types.Commitment:
            Represents a regional Commitment resource (a committed use
            contract with an explicit start and end time, purchased for
            discounted vCPU/memory rates).
    """
    # A full request object and flattened field arguments are mutually
    # exclusive (truthiness check, matching the generated surface).
    if request is not None and any([project, region, commitment]):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Coerce to the proto-plus request type only when necessary; a request
    # passed as the right type is used as-is (no flattened fields means no
    # mutation risk).
    if not isinstance(request, compute.GetRegionCommitmentRequest):
        request = compute.GetRegionCommitmentRequest(request)
    # Apply any flattened arguments onto the request.
    for attr, value in (("project", project), ("region", region), ("commitment", commitment)):
        if value is not None:
            setattr(request, attr, value)

    # The wrapped method carries retry/timeout defaults and friendly error
    # handling.
    rpc = self._transport._wrapped_methods[self._transport.get]

    # Routing fields travel in the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("project", request.project),
            ("region", request.region),
            ("commitment", request.commitment),
        )),
    )

    # Send the request and return the response.
    return rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
+ commitment_resource (google.cloud.compute_v1.types.Commitment): + The body resource for this request + This corresponds to the ``commitment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, commitment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionCommitmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionCommitmentRequest): + request = compute.InsertRegionCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if commitment_resource is not None: + request.commitment_resource = commitment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionCommitmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + commitment_resource: Optional[compute.Commitment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a commitment in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionCommitmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionCommitmentRequest, dict]): + The request object. A request message for + RegionCommitments.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + commitment_resource (google.cloud.compute_v1.types.Commitment): + The body resource for this request + This corresponds to the ``commitment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, commitment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionCommitmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionCommitmentRequest): + request = compute.InsertRegionCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if commitment_resource is not None: + request.commitment_resource = commitment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListRegionCommitmentsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of commitments contained within the + specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionCommitmentsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionCommitmentsRequest, dict]): + The request object. A request message for + RegionCommitments.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_commitments.pagers.ListPager: + Contains a list of Commitment + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionCommitmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionCommitmentsRequest): + request = compute.ListRegionCommitmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateRegionCommitmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + commitment: Optional[str] = None, + commitment_resource: Optional[compute.Commitment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified commitment with the data included in the + request. Update is performed only on selected fields included as + part of update-mask. Only the following fields can be modified: + auto_renew. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionCommitmentRequest( + commitment="commitment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionCommitmentRequest, dict]): + The request object. A request message for + RegionCommitments.Update. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + commitment (str): + Name of the commitment for which auto + renew is being updated. + + This corresponds to the ``commitment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + commitment_resource (google.cloud.compute_v1.types.Commitment): + The body resource for this request + This corresponds to the ``commitment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, commitment, commitment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionCommitmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.UpdateRegionCommitmentRequest): + request = compute.UpdateRegionCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if commitment is not None: + request.commitment = commitment + if commitment_resource is not None: + request.commitment_resource = commitment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("commitment", request.commitment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateRegionCommitmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + commitment: Optional[str] = None, + commitment_resource: Optional[compute.Commitment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified commitment with the data included in the + request. Update is performed only on selected fields included as + part of update-mask. Only the following fields can be modified: + auto_renew. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionCommitmentRequest( + commitment="commitment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionCommitmentRequest, dict]): + The request object. A request message for + RegionCommitments.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + commitment (str): + Name of the commitment for which auto + renew is being updated. + + This corresponds to the ``commitment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + commitment_resource (google.cloud.compute_v1.types.Commitment): + The body resource for this request + This corresponds to the ``commitment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, commitment, commitment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionCommitmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionCommitmentRequest): + request = compute.UpdateRegionCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if commitment is not None: + request.commitment = commitment + if commitment_resource is not None: + request.commitment_resource = commitment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("commitment", request.commitment), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionCommitmentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionCommitmentsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/pagers.py new file mode 100644 index 000000000..e02b4d29e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.CommitmentAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.CommitmentAggregatedList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.CommitmentAggregatedList], + request: compute.AggregatedListRegionCommitmentsRequest, + response: compute.CommitmentAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListRegionCommitmentsRequest): + The initial request object. + response (google.cloud.compute_v1.types.CommitmentAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListRegionCommitmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.CommitmentAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.CommitmentsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.CommitmentsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.CommitmentList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.CommitmentList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.CommitmentList], + request: compute.ListRegionCommitmentsRequest, + response: compute.CommitmentList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionCommitmentsRequest): + The initial request object. + response (google.cloud.compute_v1.types.CommitmentList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListRegionCommitmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.CommitmentList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Commitment]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/__init__.py new file mode 100644 index 000000000..9cc9b6020 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionCommitmentsTransport +from .rest import RegionCommitmentsRestTransport +from .rest import RegionCommitmentsRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionCommitmentsTransport]] +_transport_registry['rest'] = RegionCommitmentsRestTransport + +__all__ = ( + 'RegionCommitmentsTransport', + 'RegionCommitmentsRestTransport', + 'RegionCommitmentsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/base.py new file mode 100644 index 000000000..f42468428 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionCommitmentsTransport(abc.ABC): + """Abstract transport class for RegionCommitments.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListRegionCommitmentsRequest], + Union[ + compute.CommitmentAggregatedList, + Awaitable[compute.CommitmentAggregatedList] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionCommitmentRequest], + Union[ + compute.Commitment, + Awaitable[compute.Commitment] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionCommitmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionCommitmentsRequest], + Union[ + compute.CommitmentList, + Awaitable[compute.CommitmentList] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateRegionCommitmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + 
@property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionCommitmentsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/rest.py new file mode 100644 index 000000000..a6861bc8f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_commitments/transports/rest.py @@ -0,0 +1,794 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionCommitmentsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionCommitmentsRestInterceptor: + """Interceptor for RegionCommitments. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionCommitmentsRestTransport. + + .. 
code-block:: python + class MyCustomRegionCommitmentsInterceptor(RegionCommitmentsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionCommitmentsRestTransport(interceptor=MyCustomRegionCommitmentsInterceptor()) + client = RegionCommitmentsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListRegionCommitmentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListRegionCommitmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionCommitments server. 
+ """ + return request, metadata + + def post_aggregated_list(self, response: compute.CommitmentAggregatedList) -> compute.CommitmentAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the RegionCommitments server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionCommitmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionCommitmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionCommitments server. + """ + return request, metadata + + def post_get(self, response: compute.Commitment) -> compute.Commitment: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionCommitments server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionCommitmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionCommitmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionCommitments server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionCommitments server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListRegionCommitmentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionCommitmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionCommitments server. + """ + return request, metadata + + def post_list(self, response: compute.CommitmentList) -> compute.CommitmentList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionCommitments server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateRegionCommitmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateRegionCommitmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionCommitments server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the RegionCommitments server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionCommitmentsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionCommitmentsRestInterceptor + + +class RegionCommitmentsRestTransport(RegionCommitmentsTransport): + """REST backend transport for RegionCommitments. + + The RegionCommitments API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionCommitmentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionCommitmentsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(RegionCommitmentsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListRegionCommitmentsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.CommitmentAggregatedList: + r"""Call the aggregated list method over HTTP. 
+ + Args: + request (~.compute.AggregatedListRegionCommitmentsRequest): + The request object. A request message for + RegionCommitments.AggregatedList. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.CommitmentAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/commitments', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListRegionCommitmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.CommitmentAggregatedList() + pb_resp = compute.CommitmentAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Get(RegionCommitmentsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionCommitmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Commitment: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionCommitmentRequest): + The request object. A request message for + RegionCommitments.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Commitment: + Represents a regional Commitment + resource. Creating a commitment resource + means that you are purchasing a + committed use contract with an explicit + start and end time. You can create + commitments based on vCPUs and memory + usage and receive discounted rates. For + full details, read Signing Up for + Committed Use Discounts. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/commitments/{commitment}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionCommitmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Commitment() + pb_resp = compute.Commitment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionCommitmentsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionCommitmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionCommitmentRequest): + The request object. A request message for + RegionCommitments.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/commitments', + 'body': 'commitment_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionCommitmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionCommitmentsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionCommitmentsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.CommitmentList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionCommitmentsRequest): + The request object. A request message for + RegionCommitments.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.CommitmentList: + Contains a list of Commitment + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/commitments', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionCommitmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.CommitmentList() + pb_resp = compute.CommitmentList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Update(RegionCommitmentsRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateRegionCommitmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateRegionCommitmentRequest): + The request object. A request message for + RegionCommitments.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/commitments/{commitment}', + 'body': 'commitment_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateRegionCommitmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListRegionCommitmentsRequest], + compute.CommitmentAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionCommitmentRequest], + compute.Commitment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionCommitmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionCommitmentsRequest], + compute.CommitmentList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateRegionCommitmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionCommitmentsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/__init__.py new file mode 100644 index 000000000..173ad55e7 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionDiskTypesClient + +__all__ = ( + 'RegionDiskTypesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/client.py new file mode 100644 index 000000000..4de1964e9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/client.py @@ -0,0 +1,645 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.region_disk_types import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionDiskTypesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionDiskTypesRestTransport + + +class RegionDiskTypesClientMeta(type): + """Metaclass for the RegionDiskTypes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionDiskTypesTransport]] + _transport_registry["rest"] = RegionDiskTypesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionDiskTypesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionDiskTypesClient(metaclass=RegionDiskTypesClientMeta): + """The RegionDiskTypes API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionDiskTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionDiskTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionDiskTypesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionDiskTypesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionDiskTypesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region disk types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionDiskTypesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionDiskTypesTransport): + # transport is a RegionDiskTypesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get(self, + request: Optional[Union[compute.GetRegionDiskTypeRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.DiskType: + r"""Returns the specified regional disk type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionDiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionDiskTypeRequest( + disk_type="disk_type_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionDiskTypeRequest, dict]): + The request object. A request message for + RegionDiskTypes.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_type (str): + Name of the disk type to return. + This corresponds to the ``disk_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.DiskType: + Represents a Disk Type resource. Google Compute Engine + has two Disk Type resources: \* + [Regional](/compute/docs/reference/rest/v1/regionDiskTypes) + \* [Zonal](/compute/docs/reference/rest/v1/diskTypes) + You can choose from a variety of disk types based on + your needs. For more information, read Storage options. 
+ The diskTypes resource represents disk types for a zonal + persistent disk. For more information, read Zonal + persistent disks. The regionDiskTypes resource + represents disk types for a regional persistent disk. + For more information, read Regional persistent disks. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk_type]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionDiskTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionDiskTypeRequest): + request = compute.GetRegionDiskTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk_type is not None: + request.disk_type = disk_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk_type", request.disk_type), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListRegionDiskTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of regional disk types available to + the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionDiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionDiskTypesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionDiskTypesRequest, dict]): + The request object. A request message for + RegionDiskTypes.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_disk_types.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionDiskTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionDiskTypesRequest): + request = compute.ListRegionDiskTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionDiskTypesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionDiskTypesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/pagers.py new file mode 100644 index 000000000..8ea5b2032 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RegionDiskTypeList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RegionDiskTypeList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.RegionDiskTypeList], + request: compute.ListRegionDiskTypesRequest, + response: compute.RegionDiskTypeList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionDiskTypesRequest): + The initial request object. + response (google.cloud.compute_v1.types.RegionDiskTypeList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListRegionDiskTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RegionDiskTypeList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.DiskType]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/transports/__init__.py new file mode 100644 index 000000000..cdd991d23 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionDiskTypesTransport +from .rest import RegionDiskTypesRestTransport +from .rest import RegionDiskTypesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionDiskTypesTransport]] +_transport_registry['rest'] = RegionDiskTypesRestTransport + +__all__ = ( + 'RegionDiskTypesTransport', + 'RegionDiskTypesRestTransport', + 'RegionDiskTypesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/transports/base.py new file mode 100644 index 000000000..e4fec218b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/transports/base.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__)


class RegionDiskTypesTransport(abc.ABC):
    """Abstract transport class for RegionDiskTypes."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute.readonly',
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]):
                Authorization credentials to attach to requests. When omitted,
                the transport attempts to ascertain credentials from the
                environment.
            credentials_file (Optional[str]):
                A file with credentials loadable via
                :func:`google.auth.load_credentials_from_file`. Mutually
                exclusive with ``credentials``.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]):
                An optional project to use for billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                Client info used to send a user-agent string along with API
                requests. If ``None``, default info is used. Generally only
                needed when developing your own client library.
            always_use_jwt_access (Optional[bool]):
                Whether self-signed JWTs should be used for service account
                credentials.
        """
        auth_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Remember the caller-supplied scopes.
        self._scopes = scopes

        # Resolve credentials: an explicit object and a file are mutually
        # exclusive; fall back to application default credentials otherwise.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **auth_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **auth_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience when the credentials were passed by the user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host)

        # Service account credentials always try to use a self-signed JWT
        # when requested and supported by the auth library version.
        if (always_use_jwt_access
                and isinstance(credentials, service_account.Credentials)
                and hasattr(service_account.Credentials, "with_always_use_jwt_access")):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname, defaulting to port 443 (HTTPS) when unspecified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods (default timeout/retry + user agent).
        self._wrapped_methods = {
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetRegionDiskTypeRequest],
            Union[
                compute.DiskType,
                Awaitable[compute.DiskType]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListRegionDiskTypesRequest],
            Union[
                compute.RegionDiskTypeList,
                Awaitable[compute.RegionDiskTypeList]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()


__all__ = (
    'RegionDiskTypesTransport',
)

# ==== file: owl-bot-staging/v1/google/cloud/compute_v1/services/region_disk_types/transports/rest.py ====
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from google.auth.transport.requests import AuthorizedSession  # type: ignore
import json  # type: ignore
import grpc  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
from google.api_core import path_template
from google.api_core import gapic_v1

from google.protobuf import json_format
from requests import __version__ as requests_version
import dataclasses
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


from google.cloud.compute_v1.types import compute

from .base import RegionDiskTypesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)


class RegionDiskTypesRestInterceptor:
    """Interceptor for RegionDiskTypes.

    Interceptors are used to manipulate requests, request metadata, and responses
    in arbitrary ways.
    Example use cases include:
    * Logging
    * Verifying requests according to service or custom semantics
    * Stripping extraneous information from responses

    These use cases and more can be enabled by injecting an
    instance of a custom subclass when constructing the RegionDiskTypesRestTransport.

    .. code-block:: python

        class MyCustomRegionDiskTypesInterceptor(RegionDiskTypesRestInterceptor):
            def pre_get(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_get(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_list(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_list(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = RegionDiskTypesRestTransport(interceptor=MyCustomRegionDiskTypesInterceptor())
        client = RegionDiskTypesClient(transport=transport)

    """
    def pre_get(self, request: compute.GetRegionDiskTypeRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionDiskTypeRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionDiskTypes server.
        """
        return request, metadata

    def post_get(self, response: compute.DiskType) -> compute.DiskType:
        """Post-rpc interceptor for get

        Override in a subclass to manipulate the response
        after it is returned by the RegionDiskTypes server but before
        it is returned to user code.
        """
        return response

    def pre_list(self, request: compute.ListRegionDiskTypesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionDiskTypesRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionDiskTypes server.
        """
        return request, metadata

    def post_list(self, response: compute.RegionDiskTypeList) -> compute.RegionDiskTypeList:
        """Post-rpc interceptor for list

        Override in a subclass to manipulate the response
        after it is returned by the RegionDiskTypes server but before
        it is returned to user code.
        """
        return response


@dataclasses.dataclass
class RegionDiskTypesRestStub:
    # Shared state handed to each per-RPC callable stub.
    _session: AuthorizedSession
    _host: str
    _interceptor: RegionDiskTypesRestInterceptor


class RegionDiskTypesRestTransport(RegionDiskTypesTransport):
    """REST backend transport for RegionDiskTypes.

    The RegionDiskTypes API.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends JSON representations of protocol buffers over HTTP/1.1

    NOTE: This REST transport functionality is currently in a beta
    state (preview). We welcome your feedback via an issue in this
    library's source repository. Thank you!
    """

    def __init__(self, *,
            host: str = 'compute.googleapis.com',
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            client_cert_source_for_mtls: Optional[Callable[[
                ], Tuple[bytes, bytes]]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            url_scheme: str = 'https',
            interceptor: Optional[RegionDiskTypesRestInterceptor] = None,
            api_audience: Optional[str] = None,
            ) -> None:
        """Instantiate the transport.

        NOTE: This REST transport functionality is currently in a beta
        state (preview). We welcome your feedback via a GitHub issue in
        this library's repository. Thank you!

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]):
                Authorization credentials to attach to requests. When
                omitted, the client attempts to ascertain credentials from
                the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
                certificate to configure mutual TLS HTTP channel. It is ignored
                if ``channel`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            url_scheme: the protocol scheme for the API endpoint. Normally
                "https", but "http" can be specified for testing or local
                servers.
        """
        # Run the base constructor
        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
        # credentials object
        # NOTE(fix): the named groups `scheme`/`host` were lost in a previous
        # edit, leaving an invalid pattern; they are required below by
        # url_match_items["scheme"].
        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        if maybe_url_match is None:
            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER

        url_match_items = maybe_url_match.groupdict()

        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host

        super().__init__(
            host=host,
            credentials=credentials,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience
        )
        self._session = AuthorizedSession(
            self._credentials, default_host=self.DEFAULT_HOST)
        if client_cert_source_for_mtls:
            self._session.configure_mtls_channel(client_cert_source_for_mtls)
        self._interceptor = interceptor or RegionDiskTypesRestInterceptor()
        self._prep_wrapped_messages(client_info)

    class _Get(RegionDiskTypesRestStub):
        def __hash__(self):
            return hash("Get")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetRegionDiskTypeRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.DiskType:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetRegionDiskTypeRequest):
                    The request object. A request message for
                    RegionDiskTypes.Get. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.DiskType:
                    Represents a Disk Type resource. Google Compute Engine
                    has two Disk Type resources: regional and zonal. You can
                    choose from a variety of disk types based on your needs.
                    For more information, read Storage options.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/regions/{region}/diskTypes/{disk_type}',
            },
            ]
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetRegionDiskTypeRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.DiskType()
            pb_resp = compute.DiskType.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp

    class _List(RegionDiskTypesRestStub):
        def __hash__(self):
            return hash("List")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.ListRegionDiskTypesRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.RegionDiskTypeList:
            r"""Call the list method over HTTP.

            Args:
                request (~.compute.ListRegionDiskTypesRequest):
                    The request object. A request message for
                    RegionDiskTypes.List. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.RegionDiskTypeList:

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/regions/{region}/diskTypes',
            },
            ]
            request, metadata = self._interceptor.pre_list(request, metadata)
            pb_request = compute.ListRegionDiskTypesRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.RegionDiskTypeList()
            pb_resp = compute.RegionDiskTypeList.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list(resp)
            return resp

    @property
    def get(self) -> Callable[
            [compute.GetRegionDiskTypeRequest],
            compute.DiskType]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListRegionDiskTypesRequest],
            compute.RegionDiskTypeList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def kind(self) -> str:
        return "rest"

    def close(self):
        self._session.close()


__all__=(
    'RegionDiskTypesRestTransport',
)

# ==== file: owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/__init__.py ====
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +from .client import RegionDisksClient + +__all__ = ( + 'RegionDisksClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/client.py new file mode 100644 index 000000000..7e09b886d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/client.py @@ -0,0 +1,4474 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_disks import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionDisksTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionDisksRestTransport + + +class RegionDisksClientMeta(type): + """Metaclass for the RegionDisks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionDisksTransport]] + _transport_registry["rest"] = RegionDisksRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionDisksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+ + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionDisksClient(metaclass=RegionDisksClientMeta): + """The RegionDisks API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionDisksClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionDisksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionDisksTransport: + """Returns the transport used by the client instance. + + Returns: + RegionDisksTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization 
string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionDisksTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region disks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionDisksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionDisksTransport): + # transport is a RegionDisksTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def add_resource_policies_unary(self, + request: Optional[Union[compute.AddResourcePoliciesRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disks_add_resource_policies_request_resource: Optional[compute.RegionDisksAddResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Adds existing resource policies to a regional disk. + You can only add one policy which will be applied to + this disk for scheduling snapshot creation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_resource_policies(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.AddResourcePoliciesRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.add_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddResourcePoliciesRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.AddResourcePolicies. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disks_add_resource_policies_request_resource (google.cloud.compute_v1.types.RegionDisksAddResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``region_disks_add_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, region_disks_add_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddResourcePoliciesRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddResourcePoliciesRegionDiskRequest): + request = compute.AddResourcePoliciesRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disks_add_resource_policies_request_resource is not None: + request.region_disks_add_resource_policies_request_resource = region_disks_add_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_resource_policies(self, + request: Optional[Union[compute.AddResourcePoliciesRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disks_add_resource_policies_request_resource: Optional[compute.RegionDisksAddResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Adds existing resource policies to a regional disk. + You can only add one policy which will be applied to + this disk for scheduling snapshot creation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_resource_policies(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.AddResourcePoliciesRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.add_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddResourcePoliciesRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.AddResourcePolicies. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disks_add_resource_policies_request_resource (google.cloud.compute_v1.types.RegionDisksAddResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``region_disks_add_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, region_disks_add_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddResourcePoliciesRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddResourcePoliciesRegionDiskRequest): + request = compute.AddResourcePoliciesRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disks_add_resource_policies_request_resource is not None: + request.region_disks_add_resource_policies_request_resource = region_disks_add_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def bulk_insert_unary(self, + request: Optional[Union[compute.BulkInsertRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + bulk_insert_disk_resource_resource: Optional[compute.BulkInsertDiskResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Bulk create a set of disks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_bulk_insert(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.BulkInsertRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.BulkInsert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ bulk_insert_disk_resource_resource (google.cloud.compute_v1.types.BulkInsertDiskResource): + The body resource for this request + This corresponds to the ``bulk_insert_disk_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, bulk_insert_disk_resource_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.BulkInsertRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.BulkInsertRegionDiskRequest): + request = compute.BulkInsertRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if bulk_insert_disk_resource_resource is not None: + request.bulk_insert_disk_resource_resource = bulk_insert_disk_resource_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.bulk_insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def bulk_insert(self, + request: Optional[Union[compute.BulkInsertRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + bulk_insert_disk_resource_resource: Optional[compute.BulkInsertDiskResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Bulk create a set of disks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_bulk_insert(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.BulkInsertRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.BulkInsert. See the method + description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + bulk_insert_disk_resource_resource (google.cloud.compute_v1.types.BulkInsertDiskResource): + The body resource for this request + This corresponds to the ``bulk_insert_disk_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, bulk_insert_disk_resource_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.BulkInsertRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.BulkInsertRegionDiskRequest): + request = compute.BulkInsertRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if bulk_insert_disk_resource_resource is not None: + request.bulk_insert_disk_resource_resource = bulk_insert_disk_resource_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.bulk_insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def create_snapshot_unary(self, + request: Optional[Union[compute.CreateSnapshotRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + snapshot_resource: Optional[compute.Snapshot] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a snapshot of a specified persistent disk. + For regular snapshot creation, consider using + snapshots.insert instead, as that method supports more + features, such as creating snapshots in a project + different from the source disk project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_create_snapshot(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.CreateSnapshotRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CreateSnapshotRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.CreateSnapshot. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the regional persistent disk + to snapshot. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_resource (google.cloud.compute_v1.types.Snapshot): + The body resource for this request + This corresponds to the ``snapshot_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, snapshot_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CreateSnapshotRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.CreateSnapshotRegionDiskRequest): + request = compute.CreateSnapshotRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if snapshot_resource is not None: + request.snapshot_resource = snapshot_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_snapshot(self, + request: Optional[Union[compute.CreateSnapshotRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + snapshot_resource: Optional[compute.Snapshot] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a snapshot of a specified persistent disk. + For regular snapshot creation, consider using + snapshots.insert instead, as that method supports more + features, such as creating snapshots in a project + different from the source disk project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_create_snapshot(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.CreateSnapshotRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CreateSnapshotRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.CreateSnapshot. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the regional persistent disk + to snapshot. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_resource (google.cloud.compute_v1.types.Snapshot): + The body resource for this request + This corresponds to the ``snapshot_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, snapshot_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CreateSnapshotRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.CreateSnapshotRegionDiskRequest): + request = compute.CreateSnapshotRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if snapshot_resource is not None: + request.snapshot_resource = snapshot_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified regional persistent disk. + Deleting a regional disk removes all the replicas of its + data permanently and is irreversible. However, deleting + a disk does not delete any snapshots previously made + from the disk. You must separately delete snapshots. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the regional persistent disk + to delete. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionDiskRequest): + request = compute.DeleteRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified regional persistent disk. 
+ Deleting a regional disk removes all the replicas of its + data permanently and is irreversible. However, deleting + a disk does not delete any snapshots previously made + from the disk. You must separately delete snapshots. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the regional persistent disk + to delete. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, disk])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.DeleteRegionDiskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.DeleteRegionDiskRequest):
+            request = compute.DeleteRegionDiskRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+        if disk is not None:
+            request.disk = disk
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("region", request.region),
+                ("disk", request.disk),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Disk: + r"""Returns a specified regional persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the regional persistent disk + to return. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Disk: + Represents a Persistent Disk resource. Google Compute + Engine has two Disk resources: \* + [Zonal](/compute/docs/reference/rest/v1/disks) \* + [Regional](/compute/docs/reference/rest/v1/regionDisks) + Persistent disks are required for running your VM + instances. Create both boot and non-boot (data) + persistent disks. For more information, read Persistent + Disks. 
For more storage options, read Storage options. + The disks resource represents a zonal persistent disk. + For more information, read Zonal persistent disks. The + regionDisks resource represents a regional persistent + disk. For more information, read Regional resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionDiskRequest): + request = compute.GetRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.GetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicyRegionDiskRequest): + request = compute.GetIamPolicyRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk_resource: Optional[compute.Disk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a persistent regional disk in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + This corresponds to the ``disk_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, disk_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionDiskRequest): + request = compute.InsertRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk_resource is not None: + request.disk_resource = disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk_resource: Optional[compute.Disk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a persistent regional disk in the specified + project using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + This corresponds to the ``disk_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionDiskRequest): + request = compute.InsertRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk_resource is not None: + request.disk_resource = disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionDisksRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of persistent disks contained + within the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionDisksRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionDisksRequest, dict]): + The request object. A request message for + RegionDisks.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_disks.pagers.ListPager: + A list of Disk resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionDisksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionDisksRequest): + request = compute.ListRegionDisksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def remove_resource_policies_unary(self, + request: Optional[Union[compute.RemoveResourcePoliciesRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disks_remove_resource_policies_request_resource: Optional[compute.RegionDisksRemoveResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes resource policies from a regional disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_resource_policies(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.RemoveResourcePolicies. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disks_remove_resource_policies_request_resource (google.cloud.compute_v1.types.RegionDisksRemoveResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``region_disks_remove_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, region_disks_remove_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveResourcePoliciesRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveResourcePoliciesRegionDiskRequest): + request = compute.RemoveResourcePoliciesRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disks_remove_resource_policies_request_resource is not None: + request.region_disks_remove_resource_policies_request_resource = region_disks_remove_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_resource_policies(self, + request: Optional[Union[compute.RemoveResourcePoliciesRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disks_remove_resource_policies_request_resource: Optional[compute.RegionDisksRemoveResourcePoliciesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes resource policies from a regional disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_resource_policies(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.RemoveResourcePolicies. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disks_remove_resource_policies_request_resource (google.cloud.compute_v1.types.RegionDisksRemoveResourcePoliciesRequest): + The body resource for this request + This corresponds to the ``region_disks_remove_resource_policies_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, region_disks_remove_resource_policies_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveResourcePoliciesRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveResourcePoliciesRegionDiskRequest): + request = compute.RemoveResourcePoliciesRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disks_remove_resource_policies_request_resource is not None: + request.region_disks_remove_resource_policies_request_resource = region_disks_remove_resource_policies_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_resource_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def resize_unary(self, + request: Optional[Union[compute.ResizeRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disks_resize_request_resource: Optional[compute.RegionDisksResizeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Resizes the specified regional persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.ResizeRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Resize. See the method + description for details. + project (str): + The project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the regional persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disks_resize_request_resource (google.cloud.compute_v1.types.RegionDisksResizeRequest): + The body resource for this request + This corresponds to the ``region_disks_resize_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, region_disks_resize_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeRegionDiskRequest): + request = compute.ResizeRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disks_resize_request_resource is not None: + request.region_disks_resize_request_resource = region_disks_resize_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def resize(self, + request: Optional[Union[compute.ResizeRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disks_resize_request_resource: Optional[compute.RegionDisksResizeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Resizes the specified regional persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.ResizeRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Resize. See the method + description for details. + project (str): + The project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the regional persistent disk. 
+ This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disks_resize_request_resource (google.cloud.compute_v1.types.RegionDisksResizeRequest): + The body resource for this request + This corresponds to the ``region_disks_resize_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, region_disks_resize_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeRegionDiskRequest): + request = compute.ResizeRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disks_resize_request_resource is not None: + request.region_disks_resize_request_resource = region_disks_resize_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[compute.RegionSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.SetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyRegionDiskRequest): + request = compute.SetIamPolicyRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = region_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on the target regional disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsRegionDiskRequest): + request = compute.SetLabelsRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on the target regional disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsRegionDiskRequest): + request = compute.SetLabelsRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def start_async_replication_unary(self, + request: Optional[Union[compute.StartAsyncReplicationRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disks_start_async_replication_request_resource: Optional[compute.RegionDisksStartAsyncReplicationRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Starts asynchronous replication. Must be invoked on + the primary disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_start_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StartAsyncReplicationRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.start_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StartAsyncReplicationRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.StartAsyncReplication. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disks_start_async_replication_request_resource (google.cloud.compute_v1.types.RegionDisksStartAsyncReplicationRequest): + The body resource for this request + This corresponds to the ``region_disks_start_async_replication_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, region_disks_start_async_replication_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StartAsyncReplicationRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StartAsyncReplicationRegionDiskRequest): + request = compute.StartAsyncReplicationRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disks_start_async_replication_request_resource is not None: + request.region_disks_start_async_replication_request_resource = region_disks_start_async_replication_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def start_async_replication(self, + request: Optional[Union[compute.StartAsyncReplicationRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disks_start_async_replication_request_resource: Optional[compute.RegionDisksStartAsyncReplicationRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Starts asynchronous replication. Must be invoked on + the primary disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_start_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StartAsyncReplicationRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.start_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StartAsyncReplicationRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.StartAsyncReplication. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disks_start_async_replication_request_resource (google.cloud.compute_v1.types.RegionDisksStartAsyncReplicationRequest): + The body resource for this request + This corresponds to the ``region_disks_start_async_replication_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, region_disks_start_async_replication_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StartAsyncReplicationRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StartAsyncReplicationRegionDiskRequest): + request = compute.StartAsyncReplicationRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disks_start_async_replication_request_resource is not None: + request.region_disks_start_async_replication_request_resource = region_disks_start_async_replication_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def stop_async_replication_unary(self, + request: Optional[Union[compute.StopAsyncReplicationRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Stops asynchronous replication. Can be invoked either + on the primary or on the secondary disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_stop_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StopAsyncReplicationRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.stop_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StopAsyncReplicationRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.StopAsyncReplication. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, disk]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StopAsyncReplicationRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StopAsyncReplicationRegionDiskRequest): + request = compute.StopAsyncReplicationRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stop_async_replication(self, + request: Optional[Union[compute.StopAsyncReplicationRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Stops asynchronous replication. Can be invoked either + on the primary or on the secondary disk. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_stop_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StopAsyncReplicationRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.stop_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StopAsyncReplicationRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.StopAsyncReplication. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The name of the persistent disk. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StopAsyncReplicationRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StopAsyncReplicationRegionDiskRequest): + request = compute.StopAsyncReplicationRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def stop_group_async_replication_unary(self, + request: Optional[Union[compute.StopGroupAsyncReplicationRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disks_stop_group_async_replication_resource_resource: Optional[compute.DisksStopGroupAsyncReplicationResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Stops asynchronous replication for a consistency + group of disks. Can be invoked either in the primary or + secondary scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_stop_group_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StopGroupAsyncReplicationRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.stop_group_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StopGroupAsyncReplicationRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.StopGroupAsyncReplication. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. This must be the region of the + primary or secondary disks in the + consistency group. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_stop_group_async_replication_resource_resource (google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource): + The body resource for this request + This corresponds to the ``disks_stop_group_async_replication_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disks_stop_group_async_replication_resource_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StopGroupAsyncReplicationRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.StopGroupAsyncReplicationRegionDiskRequest): + request = compute.StopGroupAsyncReplicationRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disks_stop_group_async_replication_resource_resource is not None: + request.disks_stop_group_async_replication_resource_resource = disks_stop_group_async_replication_resource_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_group_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stop_group_async_replication(self, + request: Optional[Union[compute.StopGroupAsyncReplicationRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disks_stop_group_async_replication_resource_resource: Optional[compute.DisksStopGroupAsyncReplicationResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Stops asynchronous replication for a consistency + group of disks. Can be invoked either in the primary or + secondary scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_stop_group_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StopGroupAsyncReplicationRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.stop_group_async_replication(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.StopGroupAsyncReplicationRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.StopGroupAsyncReplication. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. This must be the region of the + primary or secondary disks in the + consistency group. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disks_stop_group_async_replication_resource_resource (google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource): + The body resource for this request + This corresponds to the ``disks_stop_group_async_replication_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disks_stop_group_async_replication_resource_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.StopGroupAsyncReplicationRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.StopGroupAsyncReplicationRegionDiskRequest): + request = compute.StopGroupAsyncReplicationRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disks_stop_group_async_replication_resource_resource is not None: + request.disks_stop_group_async_replication_resource_resource = disks_stop_group_async_replication_resource_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_group_async_replication] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsRegionDiskRequest, dict]): + The request object. 
A request message for + RegionDisks.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.TestIamPermissionsRegionDiskRequest): + request = compute.TestIamPermissionsRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + disk_resource: Optional[compute.Disk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Update the specified disk with the data included in the request. + Update is performed only on selected fields included as part of + update-mask. Only the following fields can be modified: + user_license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + This corresponds to the ``disk_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, disk_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionDiskRequest): + request = compute.UpdateRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if disk_resource is not None: + request.disk_resource = disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update(self, + request: Optional[Union[compute.UpdateRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + disk_resource: Optional[compute.Disk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Update the specified disk with the data included in the request. + Update is performed only on selected fields included as part of + update-mask. Only the following fields can be modified: + user_license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + The disk name for this request. + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + This corresponds to the ``disk_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, disk, disk_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionDiskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionDiskRequest): + request = compute.UpdateRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if disk_resource is not None: + request.disk_resource = disk_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "RegionDisksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionDisksClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/pagers.py new file mode 100644 index 000000000..38de7308c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.DiskList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.DiskList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.DiskList], + request: compute.ListRegionDisksRequest, + response: compute.DiskList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionDisksRequest): + The initial request object. + response (google.cloud.compute_v1.types.DiskList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListRegionDisksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.DiskList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Disk]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/__init__.py new file mode 100644 index 000000000..77d9b4489 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionDisksTransport +from .rest import RegionDisksRestTransport +from .rest import RegionDisksRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionDisksTransport]] +_transport_registry['rest'] = RegionDisksRestTransport + +__all__ = ( + 'RegionDisksTransport', + 'RegionDisksRestTransport', + 'RegionDisksRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/base.py new file mode 100644 index 000000000..86a835f56 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/base.py @@ -0,0 +1,387 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionDisksTransport(abc.ABC): + """Abstract transport class for RegionDisks.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_resource_policies: gapic_v1.method.wrap_method( + self.add_resource_policies, + default_timeout=None, + client_info=client_info, + ), + self.bulk_insert: gapic_v1.method.wrap_method( + self.bulk_insert, + default_timeout=None, + client_info=client_info, + ), + self.create_snapshot: gapic_v1.method.wrap_method( + self.create_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.remove_resource_policies: gapic_v1.method.wrap_method( + self.remove_resource_policies, + default_timeout=None, + client_info=client_info, + ), + self.resize: gapic_v1.method.wrap_method( + self.resize, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + self.start_async_replication: gapic_v1.method.wrap_method( + self.start_async_replication, + default_timeout=None, + client_info=client_info, + ), + self.stop_async_replication: gapic_v1.method.wrap_method( + self.stop_async_replication, + default_timeout=None, + client_info=client_info, + ), + 
self.stop_group_async_replication: gapic_v1.method.wrap_method( + self.stop_group_async_replication, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def add_resource_policies(self) -> Callable[ + [compute.AddResourcePoliciesRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def bulk_insert(self) -> Callable[ + [compute.BulkInsertRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def create_snapshot(self) -> Callable[ + [compute.CreateSnapshotRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionDiskRequest], + Union[ + compute.Disk, + Awaitable[compute.Disk] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyRegionDiskRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def 
list(self) -> Callable[ + [compute.ListRegionDisksRequest], + Union[ + compute.DiskList, + Awaitable[compute.DiskList] + ]]: + raise NotImplementedError() + + @property + def remove_resource_policies(self) -> Callable[ + [compute.RemoveResourcePoliciesRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def resize(self) -> Callable[ + [compute.ResizeRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyRegionDiskRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def start_async_replication(self) -> Callable[ + [compute.StartAsyncReplicationRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def stop_async_replication(self) -> Callable[ + [compute.StopAsyncReplicationRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def stop_group_async_replication(self) -> Callable[ + [compute.StopGroupAsyncReplicationRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsRegionDiskRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateRegionDiskRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + 
def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionDisksTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/rest.py new file mode 100644 index 000000000..bc4b3710f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_disks/transports/rest.py @@ -0,0 +1,2438 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionDisksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionDisksRestInterceptor: + """Interceptor for RegionDisks. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionDisksRestTransport. + + .. 
code-block:: python + class MyCustomRegionDisksInterceptor(RegionDisksRestInterceptor): + def pre_add_resource_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_resource_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_bulk_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_bulk_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_resource_policies(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_resource_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resize(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_resize(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_async_replication(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_async_replication(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop_async_replication(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_async_replication(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop_group_async_replication(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_group_async_replication(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionDisksRestTransport(interceptor=MyCustomRegionDisksInterceptor()) + client = RegionDisksClient(transport=transport) + + + """ + def pre_add_resource_policies(self, request: compute.AddResourcePoliciesRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddResourcePoliciesRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_resource_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_add_resource_policies(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_resource_policies + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_bulk_insert(self, request: compute.BulkInsertRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.BulkInsertRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for bulk_insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_bulk_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for bulk_insert + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. 
+ """ + return response + def pre_create_snapshot(self, request: compute.CreateSnapshotRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.CreateSnapshotRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_create_snapshot(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for create_snapshot + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_get(self, response: compute.Disk) -> compute.Disk: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. 
+ """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionDisksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionDisksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_list(self, response: compute.DiskList) -> compute.DiskList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. 
+ """ + return response + def pre_remove_resource_policies(self, request: compute.RemoveResourcePoliciesRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveResourcePoliciesRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_resource_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_remove_resource_policies(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_resource_policies + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_resize(self, request: compute.ResizeRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ResizeRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resize + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_resize(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for resize + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. 
+ """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_start_async_replication(self, request: compute.StartAsyncReplicationRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StartAsyncReplicationRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_async_replication + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_start_async_replication(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for start_async_replication + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. 
+ """ + return response + def pre_stop_async_replication(self, request: compute.StopAsyncReplicationRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StopAsyncReplicationRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_async_replication + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_stop_async_replication(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for stop_async_replication + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_stop_group_async_replication(self, request: compute.StopGroupAsyncReplicationRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.StopGroupAsyncReplicationRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_group_async_replication + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_stop_group_async_replication(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for stop_group_async_replication + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateRegionDiskRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateRegionDiskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionDisksRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionDisksRestInterceptor + + +class RegionDisksRestTransport(RegionDisksTransport): + """REST backend transport for RegionDisks. + + The RegionDisks API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionDisksRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionDisksRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddResourcePolicies(RegionDisksRestStub): + def __hash__(self): + return hash("AddResourcePolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddResourcePoliciesRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add resource policies method over HTTP. + + Args: + request (~.compute.AddResourcePoliciesRegionDiskRequest): + The request object. 
A request message for + RegionDisks.AddResourcePolicies. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}/addResourcePolicies', + 'body': 'region_disks_add_resource_policies_request_resource', + }, + ] + request, metadata = self._interceptor.pre_add_resource_policies(request, metadata) + pb_request = compute.AddResourcePoliciesRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = 
dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_resource_policies(resp) + return resp + + class _BulkInsert(RegionDisksRestStub): + def __hash__(self): + return hash("BulkInsert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.BulkInsertRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the bulk insert method over HTTP. + + Args: + request (~.compute.BulkInsertRegionDiskRequest): + The request object. A request message for + RegionDisks.BulkInsert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. 
For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/bulkInsert', + 'body': 'bulk_insert_disk_resource_resource', + }, + ] + request, metadata = self._interceptor.pre_bulk_insert(request, metadata) + pb_request = compute.BulkInsertRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_bulk_insert(resp) + return resp + + class _CreateSnapshot(RegionDisksRestStub): + def __hash__(self): + return hash("CreateSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.CreateSnapshotRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the create snapshot method over HTTP. + + Args: + request (~.compute.CreateSnapshotRegionDiskRequest): + The request object. A request message for + RegionDisks.CreateSnapshot. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}/createSnapshot', + 'body': 'snapshot_resource', + }, + ] + request, metadata = self._interceptor.pre_create_snapshot(request, metadata) + pb_request = compute.CreateSnapshotRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_snapshot(resp) + return resp + + class _Delete(RegionDisksRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionDiskRequest): + The request object. A request message for + RegionDisks.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionDisksRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Disk: + r"""Call the get method over HTTP. 
+ + Args: + request (~.compute.GetRegionDiskRequest): + The request object. A request message for + RegionDisks.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Disk: + Represents a Persistent Disk resource. Google Compute + Engine has two Disk resources: \* + `Zonal `__ \* + `Regional `__ + Persistent disks are required for running your VM + instances. Create both boot and non-boot (data) + persistent disks. For more information, read Persistent + Disks. For more storage options, read Storage options. + The disks resource represents a zonal persistent disk. + For more information, read Zonal persistent disks. The + regionDisks resource represents a regional persistent + disk. For more information, read Regional resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Disk() + pb_resp = compute.Disk.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(RegionDisksRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyRegionDiskRequest): + The request object. A request message for + RegionDisks.GetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(RegionDisksRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionDiskRequest): + The request object. A request message for + RegionDisks.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks', + 'body': 'disk_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionDisksRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionDisksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DiskList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionDisksRequest): + The request object. A request message for + RegionDisks.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.DiskList: + A list of Disk resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionDisksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.DiskList() + pb_resp = compute.DiskList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _RemoveResourcePolicies(RegionDisksRestStub): + def __hash__(self): + return hash("RemoveResourcePolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveResourcePoliciesRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove resource policies method over HTTP. + + Args: + request (~.compute.RemoveResourcePoliciesRegionDiskRequest): + The request object. A request message for + RegionDisks.RemoveResourcePolicies. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}/removeResourcePolicies', + 'body': 'region_disks_remove_resource_policies_request_resource', + }, + ] + request, metadata = self._interceptor.pre_remove_resource_policies(request, metadata) + pb_request = compute.RemoveResourcePoliciesRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_resource_policies(resp) + return resp + + class _Resize(RegionDisksRestStub): + def __hash__(self): + return hash("Resize") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ResizeRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the resize method over HTTP. + + Args: + request (~.compute.ResizeRegionDiskRequest): + The request object. A request message for + RegionDisks.Resize. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}/resize', + 'body': 'region_disks_resize_request_resource', + }, + ] + request, metadata = self._interceptor.pre_resize(request, metadata) + pb_request = compute.ResizeRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resize(resp) + return resp + + class _SetIamPolicy(RegionDisksRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyRegionDiskRequest): + The request object. A request message for + RegionDisks.SetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setIamPolicy', + 'body': 'region_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _SetLabels(RegionDisksRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsRegionDiskRequest): + The request object. A request message for + RegionDisks.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setLabels', + 'body': 'region_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _StartAsyncReplication(RegionDisksRestStub): + def __hash__(self): + return hash("StartAsyncReplication") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StartAsyncReplicationRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the start async replication method over HTTP. + + Args: + request (~.compute.StartAsyncReplicationRegionDiskRequest): + The request object. A request message for + RegionDisks.StartAsyncReplication. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}/startAsyncReplication', + 'body': 'region_disks_start_async_replication_request_resource', + }, + ] + request, metadata = self._interceptor.pre_start_async_replication(request, metadata) + pb_request = compute.StartAsyncReplicationRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_async_replication(resp) + return resp + + class _StopAsyncReplication(RegionDisksRestStub): + def __hash__(self): + return hash("StopAsyncReplication") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StopAsyncReplicationRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the stop async replication method over HTTP. + + Args: + request (~.compute.StopAsyncReplicationRegionDiskRequest): + The request object. A request message for + RegionDisks.StopAsyncReplication. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}/stopAsyncReplication', + }, + ] + request, metadata = self._interceptor.pre_stop_async_replication(request, metadata) + pb_request = compute.StopAsyncReplicationRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_async_replication(resp) + return resp + + class _StopGroupAsyncReplication(RegionDisksRestStub): + def __hash__(self): + return hash("StopGroupAsyncReplication") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.StopGroupAsyncReplicationRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the stop group async + replication method over HTTP. + + Args: + request (~.compute.StopGroupAsyncReplicationRegionDiskRequest): + The request object. A request message for + RegionDisks.StopGroupAsyncReplication. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/stopGroupAsyncReplication', + 'body': 'disks_stop_group_async_replication_resource_resource', + }, + ] + request, metadata = self._interceptor.pre_stop_group_async_replication(request, metadata) + pb_request = compute.StopGroupAsyncReplicationRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_group_async_replication(resp) + return resp + + class _TestIamPermissions(RegionDisksRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsRegionDiskRequest): + The request object. A request message for + RegionDisks.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _Update(RegionDisksRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateRegionDiskRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateRegionDiskRequest): + The request object. A request message for + RegionDisks.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/disks/{disk}', + 'body': 'disk_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def add_resource_policies(self) -> Callable[ + [compute.AddResourcePoliciesRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AddResourcePolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def bulk_insert(self) -> Callable[ + [compute.BulkInsertRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BulkInsert(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_snapshot(self) -> Callable[ + [compute.CreateSnapshotRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionDiskRequest], + compute.Disk]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyRegionDiskRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionDisksRequest], + compute.DiskList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_resource_policies(self) -> Callable[ + [compute.RemoveResourcePoliciesRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveResourcePolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def resize(self) -> Callable[ + [compute.ResizeRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Resize(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyRegionDiskRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_async_replication(self) -> Callable[ + [compute.StartAsyncReplicationRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartAsyncReplication(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_async_replication(self) -> Callable[ + [compute.StopAsyncReplicationRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StopAsyncReplication(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_group_async_replication(self) -> Callable[ + [compute.StopGroupAsyncReplicationRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StopGroupAsyncReplication(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsRegionDiskRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateRegionDiskRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionDisksRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/__init__.py new file mode 100644 index 000000000..070210727 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import RegionHealthCheckServicesClient + +__all__ = ( + 'RegionHealthCheckServicesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/client.py new file mode 100644 index 000000000..388f5d04d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/client.py @@ -0,0 +1,1485 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_health_check_services import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionHealthCheckServicesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionHealthCheckServicesRestTransport + + +class RegionHealthCheckServicesClientMeta(type): + """Metaclass for the RegionHealthCheckServices client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionHealthCheckServicesTransport]] + _transport_registry["rest"] = RegionHealthCheckServicesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionHealthCheckServicesTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionHealthCheckServicesClient(metaclass=RegionHealthCheckServicesClientMeta): + """The RegionHealthCheckServices API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionHealthCheckServicesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionHealthCheckServicesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionHealthCheckServicesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionHealthCheckServicesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionHealthCheckServicesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region health check services client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionHealthCheckServicesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionHealthCheckServicesTransport): + # transport is a RegionHealthCheckServicesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionHealthCheckServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified regional HealthCheckService. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionHealthCheckServiceRequest( + health_check_service="health_check_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionHealthCheckServiceRequest, dict]): + The request object. A request message for + RegionHealthCheckServices.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_service (str): + Name of the HealthCheckService to + delete. The name must be 1-63 characters + long, and comply with RFC1035. + + This corresponds to the ``health_check_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionHealthCheckServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionHealthCheckServiceRequest): + request = compute.DeleteRegionHealthCheckServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_service is not None: + request.health_check_service = health_check_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check_service", request.health_check_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionHealthCheckServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified regional HealthCheckService. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionHealthCheckServiceRequest( + health_check_service="health_check_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionHealthCheckServiceRequest, dict]): + The request object. A request message for + RegionHealthCheckServices.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ health_check_service (str): + Name of the HealthCheckService to + delete. The name must be 1-63 characters + long, and comply with RFC1035. + + This corresponds to the ``health_check_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionHealthCheckServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionHealthCheckServiceRequest): + request = compute.DeleteRegionHealthCheckServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_service is not None: + request.health_check_service = health_check_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check_service", request.health_check_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetRegionHealthCheckServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.HealthCheckService: + r"""Returns the specified regional HealthCheckService + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionHealthCheckServiceRequest( + health_check_service="health_check_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionHealthCheckServiceRequest, dict]): + The request object. A request message for + RegionHealthCheckServices.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ health_check_service (str): + Name of the HealthCheckService to + update. The name must be 1-63 characters + long, and comply with RFC1035. + + This corresponds to the ``health_check_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.HealthCheckService: + Represents a Health-Check as a + Service resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check_service]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionHealthCheckServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionHealthCheckServiceRequest): + request = compute.GetRegionHealthCheckServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_service is not None: + request.health_check_service = health_check_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check_service", request.health_check_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionHealthCheckServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_service_resource: Optional[compute.HealthCheckService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a regional HealthCheckService resource in the + specified project and region using the data included in + the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionHealthCheckServiceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionHealthCheckServiceRequest, dict]): + The request object. A request message for + RegionHealthCheckServices.Insert. 
See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_service_resource (google.cloud.compute_v1.types.HealthCheckService): + The body resource for this request + This corresponds to the ``health_check_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionHealthCheckServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionHealthCheckServiceRequest): + request = compute.InsertRegionHealthCheckServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_service_resource is not None: + request.health_check_service_resource = health_check_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionHealthCheckServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_service_resource: Optional[compute.HealthCheckService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a regional HealthCheckService resource in the + specified project and region using the data included in + the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionHealthCheckServiceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionHealthCheckServiceRequest, dict]): + The request object. A request message for + RegionHealthCheckServices.Insert. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_service_resource (google.cloud.compute_v1.types.HealthCheckService): + The body resource for this request + This corresponds to the ``health_check_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionHealthCheckServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionHealthCheckServiceRequest): + request = compute.InsertRegionHealthCheckServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_service_resource is not None: + request.health_check_service_resource = health_check_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionHealthCheckServicesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists all the HealthCheckService resources that have + been configured for the specified project in the given + region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionHealthCheckServicesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionHealthCheckServicesRequest, dict]): + The request object. A request message for + RegionHealthCheckServices.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_health_check_services.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionHealthCheckServicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionHealthCheckServicesRequest): + request = compute.ListRegionHealthCheckServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchRegionHealthCheckServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_service: Optional[str] = None, + health_check_service_resource: Optional[compute.HealthCheckService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified regional HealthCheckService + resource with the data included in the request. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionHealthCheckServiceRequest( + health_check_service="health_check_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionHealthCheckServiceRequest, dict]): + The request object. A request message for + RegionHealthCheckServices.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_service (str): + Name of the HealthCheckService to + update. The name must be 1-63 characters + long, and comply with RFC1035. + + This corresponds to the ``health_check_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_service_resource (google.cloud.compute_v1.types.HealthCheckService): + The body resource for this request + This corresponds to the ``health_check_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check_service, health_check_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionHealthCheckServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.PatchRegionHealthCheckServiceRequest): + request = compute.PatchRegionHealthCheckServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_service is not None: + request.health_check_service = health_check_service + if health_check_service_resource is not None: + request.health_check_service_resource = health_check_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check_service", request.health_check_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchRegionHealthCheckServiceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_service: Optional[str] = None, + health_check_service_resource: Optional[compute.HealthCheckService] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified regional HealthCheckService + resource with the data included in the request. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionHealthCheckServiceRequest( + health_check_service="health_check_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionHealthCheckServiceRequest, dict]): + The request object. A request message for + RegionHealthCheckServices.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_service (str): + Name of the HealthCheckService to + update. The name must be 1-63 characters + long, and comply with RFC1035. + + This corresponds to the ``health_check_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ health_check_service_resource (google.cloud.compute_v1.types.HealthCheckService): + The body resource for this request + This corresponds to the ``health_check_service_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check_service, health_check_service_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionHealthCheckServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionHealthCheckServiceRequest): + request = compute.PatchRegionHealthCheckServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_service is not None: + request.health_check_service = health_check_service + if health_check_service_resource is not None: + request.health_check_service_resource = health_check_service_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check_service", request.health_check_service), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionHealthCheckServicesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionHealthCheckServicesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/pagers.py new file mode 100644 index 000000000..79f2f101c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.HealthCheckServicesList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.HealthCheckServicesList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.HealthCheckServicesList], + request: compute.ListRegionHealthCheckServicesRequest, + response: compute.HealthCheckServicesList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionHealthCheckServicesRequest): + The initial request object. + response (google.cloud.compute_v1.types.HealthCheckServicesList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionHealthCheckServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.HealthCheckServicesList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.HealthCheckService]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/__init__.py new file mode 100644 index 000000000..39c0bd232 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionHealthCheckServicesTransport +from .rest import RegionHealthCheckServicesRestTransport +from .rest import RegionHealthCheckServicesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionHealthCheckServicesTransport]] +_transport_registry['rest'] = RegionHealthCheckServicesRestTransport + +__all__ = ( + 'RegionHealthCheckServicesTransport', + 'RegionHealthCheckServicesRestTransport', + 'RegionHealthCheckServicesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/base.py new file mode 100644 index 000000000..680ef48a3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionHealthCheckServicesTransport(abc.ABC): + """Abstract transport class for RegionHealthCheckServices.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionHealthCheckServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionHealthCheckServiceRequest], + Union[ + compute.HealthCheckService, + Awaitable[compute.HealthCheckService] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionHealthCheckServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionHealthCheckServicesRequest], + Union[ + compute.HealthCheckServicesList, + Awaitable[compute.HealthCheckServicesList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionHealthCheckServiceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionHealthCheckServicesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py new file mode 100644 index 000000000..3c6520c7c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py @@ -0,0 +1,800 @@ +# -*- 
coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionHealthCheckServicesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionHealthCheckServicesRestInterceptor: + """Interceptor for RegionHealthCheckServices. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionHealthCheckServicesRestTransport. + + .. code-block:: python + class MyCustomRegionHealthCheckServicesInterceptor(RegionHealthCheckServicesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionHealthCheckServicesRestTransport(interceptor=MyCustomRegionHealthCheckServicesInterceptor()) + client = RegionHealthCheckServicesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionHealthCheckServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionHealthCheckServiceRequest, 
Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthCheckServices server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthCheckServices server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionHealthCheckServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionHealthCheckServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthCheckServices server. + """ + return request, metadata + + def post_get(self, response: compute.HealthCheckService) -> compute.HealthCheckService: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthCheckServices server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionHealthCheckServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionHealthCheckServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthCheckServices server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthCheckServices server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListRegionHealthCheckServicesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionHealthCheckServicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthCheckServices server. + """ + return request, metadata + + def post_list(self, response: compute.HealthCheckServicesList) -> compute.HealthCheckServicesList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthCheckServices server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRegionHealthCheckServiceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionHealthCheckServiceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthCheckServices server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthCheckServices server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionHealthCheckServicesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionHealthCheckServicesRestInterceptor + + +class RegionHealthCheckServicesRestTransport(RegionHealthCheckServicesTransport): + """REST backend transport for RegionHealthCheckServices. + + The RegionHealthCheckServices API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionHealthCheckServicesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionHealthCheckServicesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionHealthCheckServicesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionHealthCheckServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionHealthCheckServiceRequest): + The request object. A request message for + RegionHealthCheckServices.Delete. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionHealthCheckServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionHealthCheckServicesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionHealthCheckServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.HealthCheckService: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionHealthCheckServiceRequest): + The request object. A request message for + RegionHealthCheckServices.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.HealthCheckService: + Represents a Health-Check as a + Service resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionHealthCheckServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthCheckService() + pb_resp = compute.HealthCheckService.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionHealthCheckServicesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionHealthCheckServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionHealthCheckServiceRequest): + The request object. A request message for + RegionHealthCheckServices.Insert. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthCheckServices', + 'body': 'health_check_service_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionHealthCheckServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionHealthCheckServicesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionHealthCheckServicesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.HealthCheckServicesList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionHealthCheckServicesRequest): + The request object. A request message for + RegionHealthCheckServices.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.HealthCheckServicesList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthCheckServices', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionHealthCheckServicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthCheckServicesList() + pb_resp = compute.HealthCheckServicesList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RegionHealthCheckServicesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionHealthCheckServiceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionHealthCheckServiceRequest): + The request object. A request message for + RegionHealthCheckServices.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}', + 'body': 'health_check_service_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionHealthCheckServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionHealthCheckServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionHealthCheckServiceRequest], + compute.HealthCheckService]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionHealthCheckServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionHealthCheckServicesRequest], + compute.HealthCheckServicesList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionHealthCheckServiceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionHealthCheckServicesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/__init__.py new file mode 100644 index 000000000..ee32b4912 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionHealthChecksClient + +__all__ = ( + 'RegionHealthChecksClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/client.py new file mode 100644 index 000000000..b5c90bc9b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/client.py @@ -0,0 +1,1786 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_health_checks import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionHealthChecksTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionHealthChecksRestTransport + + +class RegionHealthChecksClientMeta(type): + """Metaclass for the RegionHealthChecks client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionHealthChecksTransport]] + _transport_registry["rest"] = RegionHealthChecksRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionHealthChecksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionHealthChecksClient(metaclass=RegionHealthChecksClientMeta): + """The RegionHealthChecks API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionHealthChecksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionHealthChecksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionHealthChecksTransport: + """Returns the transport used by the client instance. + + Returns: + RegionHealthChecksTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionHealthChecksTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region health checks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionHealthChecksTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionHealthChecksTransport): + # transport is a RegionHealthChecksTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified HealthCheck resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + delete. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, health_check]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionHealthCheckRequest): + request = compute.DeleteRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check is not None: + request.health_check = health_check + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified HealthCheck resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + delete. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionHealthCheckRequest): + request = compute.DeleteRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check is not None: + request.health_check = health_check + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check", request.health_check), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.HealthCheck: + r"""Returns the specified HealthCheck resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + return. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.HealthCheck: + Represents a Health Check resource. 
Google Compute + Engine has two Health Check resources: \* + [Global](/compute/docs/reference/rest/v1/healthChecks) + \* + [Regional](/compute/docs/reference/rest/v1/regionHealthChecks) + Internal HTTP(S) load balancers must use regional health + checks (compute.v1.regionHealthChecks). Traffic Director + must use global health checks (compute.v1.healthChecks). + Internal TCP/UDP load balancers can use either regional + or global health checks (compute.v1.regionHealthChecks + or compute.v1.healthChecks). External HTTP(S), TCP + proxy, and SSL proxy load balancers as well as managed + instance group auto-healing must use global health + checks (compute.v1.healthChecks). Backend service-based + network load balancers must use regional health checks + (compute.v1.regionHealthChecks). Target pool-based + network load balancers must use legacy HTTP health + checks (compute.v1.httpHealthChecks). For more + information, see Health checks overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionHealthCheckRequest): + request = compute.GetRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check is not None: + request.health_check = health_check + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a HealthCheck resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionHealthCheckRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionHealthCheckRequest): + request = compute.InsertRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a HealthCheck resource in the specified + project using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionHealthCheckRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionHealthCheckRequest): + request = compute.InsertRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionHealthChecksRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of HealthCheck resources available + to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionHealthChecksRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionHealthChecksRequest, dict]): + The request object. A request message for + RegionHealthChecks.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_health_checks.pagers.ListPager: + Contains a list of HealthCheck + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionHealthChecksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionHealthChecksRequest): + request = compute.ListRegionHealthChecksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates a HealthCheck resource in the specified + project using the data included in the request. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + patch. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionHealthCheckRequest): + request = compute.PatchRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check is not None: + request.health_check = health_check + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates a HealthCheck resource in the specified + project using the data included in the request. This + method supports PATCH semantics and uses the JSON merge + patch format and processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + patch. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionHealthCheckRequest): + request = compute.PatchRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check is not None: + request.health_check = health_check + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check", request.health_check), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates a HealthCheck resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Update. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + update. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionHealthCheckRequest): + request = compute.UpdateRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check is not None: + request.health_check = health_check + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update(self, + request: Optional[Union[compute.UpdateRegionHealthCheckRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_check: Optional[str] = None, + health_check_resource: Optional[compute.HealthCheck] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates a HealthCheck resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionHealthCheckRequest, dict]): + The request object. A request message for + RegionHealthChecks.Update. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check (str): + Name of the HealthCheck resource to + update. + + This corresponds to the ``health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + This corresponds to the ``health_check_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, health_check, health_check_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionHealthCheckRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionHealthCheckRequest): + request = compute.UpdateRegionHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if health_check is not None: + request.health_check = health_check + if health_check_resource is not None: + request.health_check_resource = health_check_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("health_check", request.health_check), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "RegionHealthChecksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionHealthChecksClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/pagers.py new file mode 100644 index 000000000..4c716a2d5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.HealthCheckList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.HealthCheckList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.HealthCheckList], + request: compute.ListRegionHealthChecksRequest, + response: compute.HealthCheckList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionHealthChecksRequest): + The initial request object. + response (google.cloud.compute_v1.types.HealthCheckList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListRegionHealthChecksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.HealthCheckList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.HealthCheck]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/__init__.py new file mode 100644 index 000000000..ed8ad6ea8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionHealthChecksTransport +from .rest import RegionHealthChecksRestTransport +from .rest import RegionHealthChecksRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionHealthChecksTransport]] +_transport_registry['rest'] = RegionHealthChecksRestTransport + +__all__ = ( + 'RegionHealthChecksTransport', + 'RegionHealthChecksRestTransport', + 'RegionHealthChecksRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/base.py new file mode 100644 index 000000000..2d4fdfd81 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionHealthChecksTransport(abc.ABC): + """Abstract transport class for RegionHealthChecks.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionHealthCheckRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionHealthCheckRequest], + Union[ + compute.HealthCheck, + Awaitable[compute.HealthCheck] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionHealthCheckRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionHealthChecksRequest], + Union[ + compute.HealthCheckList, + Awaitable[compute.HealthCheckList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionHealthCheckRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateRegionHealthCheckRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionHealthChecksTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/rest.py new file mode 100644 index 000000000..8466fce55 --- /dev/null +++ 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_health_checks/transports/rest.py @@ -0,0 +1,954 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionHealthChecksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + 
rest_version=requests_version, +) + + +class RegionHealthChecksRestInterceptor: + """Interceptor for RegionHealthChecks. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionHealthChecksRestTransport. + + .. code-block:: python + class MyCustomRegionHealthChecksInterceptor(RegionHealthChecksRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
RegionHealthChecksRestTransport(interceptor=MyCustomRegionHealthChecksInterceptor()) + client = RegionHealthChecksClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionHealthCheckRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthChecks server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthChecks server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionHealthCheckRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthChecks server. + """ + return request, metadata + + def post_get(self, response: compute.HealthCheck) -> compute.HealthCheck: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthChecks server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionHealthCheckRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthChecks server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthChecks server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionHealthChecksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionHealthChecksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthChecks server. + """ + return request, metadata + + def post_list(self, response: compute.HealthCheckList) -> compute.HealthCheckList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthChecks server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRegionHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionHealthCheckRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthChecks server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthChecks server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateRegionHealthCheckRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateRegionHealthCheckRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthChecks server. 
+ """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the RegionHealthChecks server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionHealthChecksRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionHealthChecksRestInterceptor + + +class RegionHealthChecksRestTransport(RegionHealthChecksTransport): + """REST backend transport for RegionHealthChecks. + + The RegionHealthChecks API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionHealthChecksRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionHealthChecksRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionHealthChecksRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionHealthCheckRequest): + The request object. A request message for + RegionHealthChecks.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource.
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionHealthChecksRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.HealthCheck: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionHealthCheckRequest): + The request object. A request message for + RegionHealthChecks.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.HealthCheck: + Represents a Health Check resource. Google Compute + Engine has two Health Check resources: \* + `Global `__ + \* + `Regional `__ + Internal HTTP(S) load balancers must use regional health + checks (``compute.v1.regionHealthChecks``). Traffic + Director must use global health checks + (``compute.v1.healthChecks``). Internal TCP/UDP load + balancers can use either regional or global health + checks (``compute.v1.regionHealthChecks`` or + ``compute.v1.healthChecks``). 
External HTTP(S), TCP + proxy, and SSL proxy load balancers as well as managed + instance group auto-healing must use global health + checks (``compute.v1.healthChecks``). Backend + service-based network load balancers must use regional + health checks (``compute.v1.regionHealthChecks``). + Target pool-based network load balancers must use legacy + HTTP health checks (``compute.v1.httpHealthChecks``). + For more information, see Health checks overview. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthCheck() + pb_resp = compute.HealthCheck.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionHealthChecksRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionHealthCheckRequest): + The request object. A request message for + RegionHealthChecks.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthChecks', + 'body': 'health_check_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionHealthChecksRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionHealthChecksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.HealthCheckList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionHealthChecksRequest): + The request object. A request message for + RegionHealthChecks.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.HealthCheckList: + Contains a list of HealthCheck + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthChecks', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionHealthChecksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthCheckList() + pb_resp = compute.HealthCheckList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RegionHealthChecksRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionHealthCheckRequest): + The request object. A request message for + RegionHealthChecks.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}', + 'body': 'health_check_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _Update(RegionHealthChecksRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateRegionHealthCheckRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateRegionHealthCheckRequest): + The request object. A request message for + RegionHealthChecks.Update. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}', + 'body': 'health_check_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateRegionHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionHealthCheckRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionHealthCheckRequest], + compute.HealthCheck]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionHealthCheckRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionHealthChecksRequest], + compute.HealthCheckList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionHealthCheckRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateRegionHealthCheckRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionHealthChecksRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/__init__.py new file mode 100644 index 000000000..96b63c7b4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionInstanceGroupManagersClient + +__all__ = ( + 'RegionInstanceGroupManagersClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/client.py new file mode 100644 index 000000000..39251ee15 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/client.py @@ -0,0 +1,5254 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_instance_group_managers import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionInstanceGroupManagersTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionInstanceGroupManagersRestTransport + + +class RegionInstanceGroupManagersClientMeta(type): + """Metaclass for the RegionInstanceGroupManagers client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstanceGroupManagersTransport]] + _transport_registry["rest"] = RegionInstanceGroupManagersRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionInstanceGroupManagersTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionInstanceGroupManagersClient(metaclass=RegionInstanceGroupManagersClientMeta): + """The RegionInstanceGroupManagers API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstanceGroupManagersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstanceGroupManagersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionInstanceGroupManagersTransport: + """Returns the transport used by the client instance. + + Returns: + RegionInstanceGroupManagersTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionInstanceGroupManagersTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region instance group managers client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionInstanceGroupManagersTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionInstanceGroupManagersTransport): + # transport is a RegionInstanceGroupManagersTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def abandon_instances_unary(self, + request: Optional[Union[compute.AbandonInstancesRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_managers_abandon_instances_request_resource: Optional[compute.RegionInstanceGroupManagersAbandonInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Flags the specified instances to be immediately + removed from the managed instance group. Abandoning an + instance does not delete the instance, but it does + remove the instance from any target pools that are + applied by the managed instance group. This method + reduces the targetSize of the managed instance group by + the number of instances that you abandon. This operation + is marked as DONE when the action is scheduled even if + the instances have not yet been removed from the group. + You must separately verify the status of the abandoning + action with the listmanagedinstances method. 
If the + group is part of a backend service that has enabled + connection draining, it can take up to 60 seconds after + the connection draining duration has elapsed before the + VM instance is removed or deleted. You can specify a + maximum of 1000 instances with this method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_abandon_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.AbandonInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.abandon_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AbandonInstancesRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.AbandonInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group. 
+ This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_managers_abandon_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersAbandonInstancesRequest): + The body resource for this request + This corresponds to the ``region_instance_group_managers_abandon_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_abandon_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AbandonInstancesRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AbandonInstancesRegionInstanceGroupManagerRequest): + request = compute.AbandonInstancesRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_managers_abandon_instances_request_resource is not None: + request.region_instance_group_managers_abandon_instances_request_resource = region_instance_group_managers_abandon_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.abandon_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def abandon_instances(self, + request: Optional[Union[compute.AbandonInstancesRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_managers_abandon_instances_request_resource: Optional[compute.RegionInstanceGroupManagersAbandonInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Flags the specified instances to be immediately + removed from the managed instance group. Abandoning an + instance does not delete the instance, but it does + remove the instance from any target pools that are + applied by the managed instance group. 
This method + reduces the targetSize of the managed instance group by + the number of instances that you abandon. This operation + is marked as DONE when the action is scheduled even if + the instances have not yet been removed from the group. + You must separately verify the status of the abandoning + action with the listmanagedinstances method. If the + group is part of a backend service that has enabled + connection draining, it can take up to 60 seconds after + the connection draining duration has elapsed before the + VM instance is removed or deleted. You can specify a + maximum of 1000 instances with this method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_abandon_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.AbandonInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.abandon_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AbandonInstancesRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.AbandonInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group. + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_managers_abandon_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersAbandonInstancesRequest): + The body resource for this request + This corresponds to the ``region_instance_group_managers_abandon_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_abandon_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AbandonInstancesRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.AbandonInstancesRegionInstanceGroupManagerRequest): + request = compute.AbandonInstancesRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_managers_abandon_instances_request_resource is not None: + request.region_instance_group_managers_abandon_instances_request_resource = region_instance_group_managers_abandon_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.abandon_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def apply_updates_to_instances_unary(self, + request: Optional[Union[compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_managers_apply_updates_request_resource: Optional[compute.RegionInstanceGroupManagersApplyUpdatesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Apply updates to selected instances the managed + instance group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_apply_updates_to_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.apply_updates_to_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.ApplyUpdatesToInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request, should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group, should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_managers_apply_updates_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersApplyUpdatesRequest): + The body resource for this request + This corresponds to the ``region_instance_group_managers_apply_updates_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_apply_updates_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest): + request = compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_managers_apply_updates_request_resource is not None: + request.region_instance_group_managers_apply_updates_request_resource = region_instance_group_managers_apply_updates_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
    def apply_updates_to_instances(self,
            request: Optional[Union[compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            region_instance_group_managers_apply_updates_request_resource: Optional[compute.RegionInstanceGroupManagersApplyUpdatesRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Apply updates to selected instances in the managed
        instance group.

        Unlike ``apply_updates_to_instances_unary``, the raw
        ``Operation`` returned by the API is wrapped in an
        ``ExtendedOperation`` that can refresh its status via the
        regional operations service.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_apply_updates_to_instances():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.apply_updates_to_instances(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.ApplyUpdatesToInstances.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request, should conform to RFC1035.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group, should conform to RFC1035.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_group_managers_apply_updates_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersApplyUpdatesRequest):
                The body resource for this request.
                This corresponds to the ``region_instance_group_managers_apply_updates_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        Raises:
            ValueError: If both ``request`` and any of the flattened
                field arguments are provided.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_apply_updates_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest):
            request = compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if region_instance_group_managers_apply_updates_request_resource is not None:
            request.region_instance_group_managers_apply_updates_request_resource = region_instance_group_managers_apply_updates_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.apply_updates_to_instances]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a refresh callable for the returned operation: the regional
        # operations client re-fetches the operation identified by name.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def create_instances_unary(self,
            request: Optional[Union[compute.CreateInstancesRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            region_instance_group_managers_create_instances_request_resource: Optional[compute.RegionInstanceGroupManagersCreateInstancesRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates instances with per-instance configurations in
        this regional managed instance group. Instances are
        created using the current instance template. The create
        instances operation is marked DONE if the
        createInstances request is successful. The underlying
        actions take additional time. You must separately verify
        the status of the creating or actions with the
        listmanagedinstances method.

        This ``_unary`` variant returns the raw ``Operation`` proto
        as received from the API; it is not wrapped in an
        ``ExtendedOperation`` and is not polled for completion.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_create_instances():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.CreateInstancesRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.create_instances(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.CreateInstancesRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.CreateInstances.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                The name of the region where the
                managed instance group is located. It
                should conform to RFC1035.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. It should conform to RFC1035.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_group_managers_create_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersCreateInstancesRequest):
                The body resource for this request.
                This corresponds to the ``region_instance_group_managers_create_instances_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation for this
                request. Callers must track completion
                themselves (e.g. via the region
                operations API).

        Raises:
            ValueError: If both ``request`` and any of the flattened
                field arguments are provided.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_create_instances_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.CreateInstancesRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.CreateInstancesRegionInstanceGroupManagerRequest):
            request = compute.CreateInstancesRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if region_instance_group_managers_create_instances_request_resource is not None:
            request.region_instance_group_managers_create_instances_request_resource = region_instance_group_managers_create_instances_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.create_instances]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def create_instances(self,
            request: Optional[Union[compute.CreateInstancesRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            region_instance_group_managers_create_instances_request_resource: Optional[compute.RegionInstanceGroupManagersCreateInstancesRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Creates instances with per-instance configurations in
        this regional managed instance group. Instances are
        created using the current instance template. The create
        instances operation is marked DONE if the
        createInstances request is successful. The underlying
        actions take additional time. You must separately verify
        the status of the creating or actions with the
        listmanagedinstances method.

        Unlike ``create_instances_unary``, the raw ``Operation``
        returned by the API is wrapped in an ``ExtendedOperation``
        that can refresh its status via the regional operations
        service.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_create_instances():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.CreateInstancesRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.create_instances(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.CreateInstancesRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.CreateInstances.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                The name of the region where the
                managed instance group is located. It
                should conform to RFC1035.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. It should conform to RFC1035.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_group_managers_create_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersCreateInstancesRequest):
                The body resource for this request.
                This corresponds to the ``region_instance_group_managers_create_instances_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        Raises:
            ValueError: If both ``request`` and any of the flattened
                field arguments are provided.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_create_instances_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.CreateInstancesRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.CreateInstancesRegionInstanceGroupManagerRequest):
            request = compute.CreateInstancesRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if region_instance_group_managers_create_instances_request_resource is not None:
            request.region_instance_group_managers_create_instances_request_resource = region_instance_group_managers_create_instances_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.create_instances]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a refresh callable for the returned operation: the regional
        # operations client re-fetches the operation identified by name.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def delete_unary(self,
            request: Optional[Union[compute.DeleteRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Deletes the specified managed instance group and all
        of the instances in that group.

        This ``_unary`` variant returns the raw ``Operation`` proto
        as received from the API; it is not wrapped in an
        ``ExtendedOperation`` and is not polled for completion.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.Delete. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                Name of the managed instance group to
                delete.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation for this
                request. Callers must track completion
                themselves (e.g. via the region
                operations API).

        Raises:
            ValueError: If both ``request`` and any of the flattened
                field arguments are provided.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteRegionInstanceGroupManagerRequest):
            request = compute.DeleteRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def delete(self,
            request: Optional[Union[compute.DeleteRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified managed instance group and all
        of the instances in that group.

        Unlike ``delete_unary``, the raw ``Operation`` returned by
        the API is wrapped in an ``ExtendedOperation`` that can
        refresh its status via the regional operations service.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.Delete. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                Name of the managed instance group to
                delete.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        Raises:
            ValueError: If both ``request`` and any of the flattened
                field arguments are provided.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteRegionInstanceGroupManagerRequest):
            request = compute.DeleteRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a refresh callable for the returned operation: the regional
        # operations client re-fetches the operation identified by name.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def delete_instances_unary(self,
            request: Optional[Union[compute.DeleteInstancesRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            region_instance_group_managers_delete_instances_request_resource: Optional[compute.RegionInstanceGroupManagersDeleteInstancesRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Flags the specified instances in the managed instance
        group to be immediately deleted. The instances are also
        removed from any target pools of which they were a
        member. This method reduces the targetSize of the
        managed instance group by the number of instances that
        you delete. The deleteInstances operation is marked DONE
        if the deleteInstances request is successful. The
        underlying actions take additional time. You must
        separately verify the status of the deleting action with
        the listmanagedinstances method. If the group is part of
        a backend service that has enabled connection draining,
        it can take up to 60 seconds after the connection
        draining duration has elapsed before the VM instance is
        removed or deleted. You can specify a maximum of 1000
        instances with this method per request.

        This ``_unary`` variant returns the raw ``Operation`` proto
        as received from the API; it is not wrapped in an
        ``ExtendedOperation`` and is not polled for completion.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete_instances():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteInstancesRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.delete_instances(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteInstancesRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.DeleteInstances.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                Name of the managed instance group.
                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_group_managers_delete_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersDeleteInstancesRequest):
                The body resource for this request.
                This corresponds to the ``region_instance_group_managers_delete_instances_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation for this
                request. Callers must track completion
                themselves (e.g. via the region
                operations API).

        Raises:
            ValueError: If both ``request`` and any of the flattened
                field arguments are provided.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_delete_instances_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteInstancesRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteInstancesRegionInstanceGroupManagerRequest):
            request = compute.DeleteInstancesRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if region_instance_group_managers_delete_instances_request_resource is not None:
            request.region_instance_group_managers_delete_instances_request_resource = region_instance_group_managers_delete_instances_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_instances]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ return response + + def delete_instances(self, + request: Optional[Union[compute.DeleteInstancesRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_managers_delete_instances_request_resource: Optional[compute.RegionInstanceGroupManagersDeleteInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Flags the specified instances in the managed instance + group to be immediately deleted. The instances are also + removed from any target pools of which they were a + member. This method reduces the targetSize of the + managed instance group by the number of instances that + you delete. The deleteInstances operation is marked DONE + if the deleteInstances request is successful. The + underlying actions take additional time. You must + separately verify the status of the deleting action with + the listmanagedinstances method. If the group is part of + a backend service that has enabled connection draining, + it can take up to 60 seconds after the connection + draining duration has elapsed before the VM instance is + removed or deleted. You can specify a maximum of 1000 + instances with this method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstancesRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.DeleteInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group. + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_managers_delete_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersDeleteInstancesRequest): + The body resource for this request + This corresponds to the ``region_instance_group_managers_delete_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_delete_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteInstancesRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteInstancesRegionInstanceGroupManagerRequest): + request = compute.DeleteInstancesRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_managers_delete_instances_request_resource is not None: + request.region_instance_group_managers_delete_instances_request_resource = region_instance_group_managers_delete_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instances] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def delete_per_instance_configs_unary(self, + request: Optional[Union[compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_manager_delete_instance_config_req_resource: Optional[compute.RegionInstanceGroupManagerDeleteInstanceConfigReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes selected per-instance configurations for the + managed instance group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.DeletePerInstanceConfigs. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request, should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_manager_delete_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerDeleteInstanceConfigReq): + The body resource for this request + This corresponds to the ``region_instance_group_manager_delete_instance_config_req_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_manager_delete_instance_config_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest): + request = compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_manager_delete_instance_config_req_resource is not None: + request.region_instance_group_manager_delete_instance_config_req_resource = region_instance_group_manager_delete_instance_config_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_per_instance_configs(self, + request: Optional[Union[compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_manager_delete_instance_config_req_resource: Optional[compute.RegionInstanceGroupManagerDeleteInstanceConfigReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes selected per-instance configurations for the + managed instance group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.DeletePerInstanceConfigs. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request, should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_manager_delete_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerDeleteInstanceConfigReq): + The body resource for this request + This corresponds to the ``region_instance_group_manager_delete_instance_config_req_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_manager_delete_instance_config_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest): + request = compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_manager_delete_instance_config_req_resource is not None: + request.region_instance_group_manager_delete_instance_config_req_resource = region_instance_group_manager_delete_instance_config_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstanceGroupManager: + r"""Returns all of the details about the specified + managed instance group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group to + return. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InstanceGroupManager: + Represents a Managed Instance Group + resource. An instance group is a + collection of VM instances that you can + manage as a single entity. For more + information, read Instance groups. 
For + zonal Managed Instance Group, use the + instanceGroupManagers resource. For + regional Managed Instance Group, use the + regionInstanceGroupManagers resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionInstanceGroupManagerRequest): + request = compute.GetRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager_resource: Optional[compute.InstanceGroupManager] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a managed instance group using the + information that you specify in the request. After the + group is created, instances in the group are created + using the specified instance template. This operation is + marked as DONE when the group is created even if the + instances in the group have not yet been created. You + must separately verify the status of the individual + instances with the listmanagedinstances method. A + regional managed instance group can contain up to 2000 + instances. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstanceGroupManagerRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.Insert. 
See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + This corresponds to the ``instance_group_manager_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionInstanceGroupManagerRequest): + request = compute.InsertRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager_resource is not None: + request.instance_group_manager_resource = instance_group_manager_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager_resource: Optional[compute.InstanceGroupManager] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a managed instance group using the + information that you specify in the request. After the + group is created, instances in the group are created + using the specified instance template. This operation is + marked as DONE when the group is created even if the + instances in the group have not yet been created. You + must separately verify the status of the individual + instances with the listmanagedinstances method. A + regional managed instance group can contain up to 2000 + instances. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstanceGroupManagerRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.Insert. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + This corresponds to the ``instance_group_manager_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionInstanceGroupManagerRequest): + request = compute.InsertRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager_resource is not None: + request.instance_group_manager_resource = instance_group_manager_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of managed instance groups that + are contained within the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstanceGroupManagersRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionInstanceGroupManagersRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.List. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListPager: + Contains a list of managed instance + groups. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionInstanceGroupManagersRequest): + request = compute.ListRegionInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_errors(self, + request: Optional[Union[compute.ListErrorsRegionInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListErrorsPager: + r"""Lists all errors thrown by actions on instances for a + given regional managed instance group. The filter and + orderBy query parameters are not supported. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_errors(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListErrorsRegionInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_errors(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListErrorsRegionInstanceGroupManagersRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.ListErrors. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + Name of the region scoping this + request. This should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance group. It must be a + string that meets the requirements in RFC1035, or an + unsigned long integer: must match regexp pattern: + (?:`a-z `__?)|1-9{0,19}. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListErrorsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListErrorsRegionInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListErrorsRegionInstanceGroupManagersRequest): + request = compute.ListErrorsRegionInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_errors] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListErrorsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_managed_instances(self, + request: Optional[Union[compute.ListManagedInstancesRegionInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListManagedInstancesPager: + r"""Lists the instances in the managed instance group and instances + that are scheduled to be created. The list includes any current + actions that the group has scheduled for its instances. The + orderBy query parameter is not supported. The ``pageToken`` + query parameter is supported only in the alpha and beta API and + only if the group's ``listManagedInstancesResults`` field is set + to ``PAGINATED``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_managed_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListManagedInstancesRegionInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_managed_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListManagedInstancesRegionInstanceGroupManagersRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.ListManagedInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListManagedInstancesPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListManagedInstancesRegionInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListManagedInstancesRegionInstanceGroupManagersRequest): + request = compute.ListManagedInstancesRegionInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_managed_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListManagedInstancesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_per_instance_configs(self, + request: Optional[Union[compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPerInstanceConfigsPager: + r"""Lists all of the per-instance configurations defined + for the managed instance group. The orderBy query + parameter is not supported. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListPerInstanceConfigsRegionInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_per_instance_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.ListPerInstanceConfigs. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request, should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListPerInstanceConfigsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest): + request = compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPerInstanceConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_manager_resource: Optional[compute.InstanceGroupManager] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates a managed instance group using the + information that you specify in the request. This + operation is marked as DONE when the group is patched + even if the instances in the group are still in the + process of being patched. You must separately verify the + status of the individual instances with the + listmanagedinstances method. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. If you update your group to specify a + new template or instance configuration, it's possible + that your intended specification for each VM in the + group is different from the current state of that VM. To + learn how to apply an updated configuration to the VMs + in a MIG, see Updating instances in a MIG. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.Patch. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the instance group + manager. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + This corresponds to the ``instance_group_manager_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, instance_group_manager_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionInstanceGroupManagerRequest): + request = compute.PatchRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_manager_resource is not None: + request.instance_group_manager_resource = instance_group_manager_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch(self, + request: Optional[Union[compute.PatchRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_manager_resource: Optional[compute.InstanceGroupManager] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates a managed instance group using the + information that you specify in the request. This + operation is marked as DONE when the group is patched + even if the instances in the group are still in the + process of being patched. You must separately verify the + status of the individual instances with the + listmanagedinstances method. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. If you update your group to specify a + new template or instance configuration, it's possible + that your intended specification for each VM in the + group is different from the current state of that VM. To + learn how to apply an updated configuration to the VMs + in a MIG, see Updating instances in a MIG. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.Patch. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the instance group + manager. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + This corresponds to the ``instance_group_manager_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, instance_group_manager_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionInstanceGroupManagerRequest): + request = compute.PatchRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_manager_resource is not None: + request.instance_group_manager_resource = instance_group_manager_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def patch_per_instance_configs_unary(self, + request: Optional[Union[compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_manager_patch_instance_config_req_resource: Optional[compute.RegionInstanceGroupManagerPatchInstanceConfigReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Inserts or patches per-instance configurations for + the managed instance group. perInstanceConfig.name + serves as a key used to distinguish whether to perform + insert or patch. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.PatchPerInstanceConfigs. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request, should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region_instance_group_manager_patch_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerPatchInstanceConfigReq): + The body resource for this request + This corresponds to the ``region_instance_group_manager_patch_instance_config_req_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_manager_patch_instance_config_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest): + request = compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_manager_patch_instance_config_req_resource is not None: + request.region_instance_group_manager_patch_instance_config_req_resource = region_instance_group_manager_patch_instance_config_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_per_instance_configs(self, + request: Optional[Union[compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_manager_patch_instance_config_req_resource: Optional[compute.RegionInstanceGroupManagerPatchInstanceConfigReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Inserts or patches per-instance configurations for + the managed instance group. perInstanceConfig.name + serves as a key used to distinguish whether to perform + insert or patch. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.PatchPerInstanceConfigs. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request, should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region_instance_group_manager_patch_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerPatchInstanceConfigReq): + The body resource for this request + This corresponds to the ``region_instance_group_manager_patch_instance_config_req_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_manager_patch_instance_config_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest): + request = compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_manager_patch_instance_config_req_resource is not None: + request.region_instance_group_manager_patch_instance_config_req_resource = region_instance_group_manager_patch_instance_config_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def recreate_instances_unary(self, + request: Optional[Union[compute.RecreateInstancesRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_managers_recreate_request_resource: Optional[compute.RegionInstanceGroupManagersRecreateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Flags the specified VM instances in the managed + instance group to be immediately recreated. Each + instance is recreated using the group's current + configuration. This operation is marked as DONE when the + flag is set even if the instances have not yet been + recreated. You must separately verify the status of each + instance by checking its currentAction field; for more + information, see Checking the status of managed + instances. If the group is part of a backend service + that has enabled connection draining, it can take up to + 60 seconds after the connection draining duration has + elapsed before the VM instance is removed or deleted. + You can specify a maximum of 1000 instances with this + method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_recreate_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.RecreateInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.recreate_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RecreateInstancesRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.RecreateInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group. + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_managers_recreate_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersRecreateRequest): + The body resource for this request + This corresponds to the ``region_instance_group_managers_recreate_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_recreate_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RecreateInstancesRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RecreateInstancesRegionInstanceGroupManagerRequest): + request = compute.RecreateInstancesRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_managers_recreate_request_resource is not None: + request.region_instance_group_managers_recreate_request_resource = region_instance_group_managers_recreate_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.recreate_instances] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def recreate_instances(self, + request: Optional[Union[compute.RecreateInstancesRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_managers_recreate_request_resource: Optional[compute.RegionInstanceGroupManagersRecreateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Flags the specified VM instances in the managed + instance group to be immediately recreated. Each + instance is recreated using the group's current + configuration. This operation is marked as DONE when the + flag is set even if the instances have not yet been + recreated. You must separately verify the status of each + instance by checking its currentAction field; for more + information, see Checking the status of managed + instances. If the group is part of a backend service + that has enabled connection draining, it can take up to + 60 seconds after the connection draining duration has + elapsed before the VM instance is removed or deleted. + You can specify a maximum of 1000 instances with this + method per request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_recreate_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.RecreateInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.recreate_instances(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RecreateInstancesRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.RecreateInstances. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group. + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_managers_recreate_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersRecreateRequest): + The body resource for this request + This corresponds to the ``region_instance_group_managers_recreate_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_recreate_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RecreateInstancesRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RecreateInstancesRegionInstanceGroupManagerRequest): + request = compute.RecreateInstancesRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_managers_recreate_request_resource is not None: + request.region_instance_group_managers_recreate_request_resource = region_instance_group_managers_recreate_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.recreate_instances] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def resize_unary(self, + request: Optional[Union[compute.ResizeRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + size: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the intended size of the managed instance + group. 
If you increase the size, the group creates new + instances using the current instance template. If you + decrease the size, the group deletes one or more + instances. The resize operation is marked DONE if the + resize request is successful. The underlying actions + take additional time. You must separately verify the + status of the creating or deleting actions with the + listmanagedinstances method. If the group is part of a + backend service that has enabled connection draining, it + can take up to 60 seconds after the connection draining + duration has elapsed before the VM instance is removed + or deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ResizeRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + size=443, + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.Resize. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group. + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + size (int): + Number of instances that should exist + in this instance group manager. + + This corresponds to the ``size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, size]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeRegionInstanceGroupManagerRequest): + request = compute.ResizeRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if size is not None: + request.size = size + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resize(self, + request: Optional[Union[compute.ResizeRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + size: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the intended size of the managed instance + group. If you increase the size, the group creates new + instances using the current instance template. If you + decrease the size, the group deletes one or more + instances. The resize operation is marked DONE if the + resize request is successful. The underlying actions + take additional time. You must separately verify the + status of the creating or deleting actions with the + listmanagedinstances method. 
If the group is part of a + backend service that has enabled connection draining, it + can take up to 60 seconds after the connection draining + duration has elapsed before the VM instance is removed + or deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ResizeRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + size=443, + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.Resize. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group. + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ size (int): + Number of instances that should exist + in this instance group manager. + + This corresponds to the ``size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, size]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeRegionInstanceGroupManagerRequest): + request = compute.ResizeRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if size is not None: + request.size = size + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
        return response

    def set_instance_template_unary(self,
            request: Optional[Union[compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            region_instance_group_managers_set_template_request_resource: Optional[compute.RegionInstanceGroupManagersSetTemplateRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Sets the instance template to use when creating new
        instances or recreating instances in this group.
        Existing instances are not affected.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_instance_template():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.SetInstanceTemplateRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.set_instance_template(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetInstanceTemplateRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.SetInstanceTemplate.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_group_managers_set_template_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTemplateRequest):
                The body resource for this request
                This corresponds to the ``region_instance_group_managers_set_template_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation
                resource. Unlike ``set_instance_template``,
                this unary variant does not wrap the
                result in an ``ExtendedOperation``;
                callers must check the operation's
                status themselves.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_set_template_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetInstanceTemplateRegionInstanceGroupManagerRequest):
            request = compute.SetInstanceTemplateRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if region_instance_group_managers_set_template_request_resource is not None:
            request.region_instance_group_managers_set_template_request_resource = region_instance_group_managers_set_template_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_instance_template]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
+ return response + + def set_instance_template(self, + request: Optional[Union[compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_managers_set_template_request_resource: Optional[compute.RegionInstanceGroupManagersSetTemplateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the instance template to use when creating new + instances or recreating instances in this group. + Existing instances are not affected. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_instance_template(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetInstanceTemplateRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_instance_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetInstanceTemplateRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.SetInstanceTemplate. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_managers_set_template_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTemplateRequest): + The body resource for this request + This corresponds to the ``region_instance_group_managers_set_template_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_set_template_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetInstanceTemplateRegionInstanceGroupManagerRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetInstanceTemplateRegionInstanceGroupManagerRequest): + request = compute.SetInstanceTemplateRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_managers_set_template_request_resource is not None: + request.region_instance_group_managers_set_template_request_resource = region_instance_group_managers_set_template_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_instance_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def set_target_pools_unary(self,
            request: Optional[Union[compute.SetTargetPoolsRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            region_instance_group_managers_set_target_pools_request_resource: Optional[compute.RegionInstanceGroupManagersSetTargetPoolsRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Modifies the target pools to which all new instances
        in this group are assigned. Existing instances in the
        group are not affected.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_target_pools():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.SetTargetPoolsRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.set_target_pools(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetTargetPoolsRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.SetTargetPools.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                Name of the managed instance group.
                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_group_managers_set_target_pools_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTargetPoolsRequest):
                The body resource for this request
                This corresponds to the ``region_instance_group_managers_set_target_pools_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation
                resource. Unlike ``set_target_pools``,
                this unary variant does not wrap the
                result in an ``ExtendedOperation``;
                callers must check the operation's
                status themselves.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_set_target_pools_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetTargetPoolsRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetTargetPoolsRegionInstanceGroupManagerRequest):
            request = compute.SetTargetPoolsRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if region_instance_group_managers_set_target_pools_request_resource is not None:
            request.region_instance_group_managers_set_target_pools_request_resource = region_instance_group_managers_set_target_pools_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_target_pools]

        # Certain fields should be provided within the metadata header;
        # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_target_pools(self, + request: Optional[Union[compute.SetTargetPoolsRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_managers_set_target_pools_request_resource: Optional[compute.RegionInstanceGroupManagersSetTargetPoolsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Modifies the target pools to which all new instances + in this group are assigned. Existing instances in the + group are not affected. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_target_pools(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetPoolsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_target_pools(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetTargetPoolsRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.SetTargetPools. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + Name of the managed instance group. + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_managers_set_target_pools_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTargetPoolsRequest): + The body resource for this request + This corresponds to the ``region_instance_group_managers_set_target_pools_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group_manager, region_instance_group_managers_set_target_pools_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetTargetPoolsRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetTargetPoolsRegionInstanceGroupManagerRequest): + request = compute.SetTargetPoolsRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_managers_set_target_pools_request_resource is not None: + request.region_instance_group_managers_set_target_pools_request_resource = region_instance_group_managers_set_target_pools_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_target_pools] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
        return response

    def update_per_instance_configs_unary(self,
            request: Optional[Union[compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group_manager: Optional[str] = None,
            region_instance_group_manager_update_instance_config_req_resource: Optional[compute.RegionInstanceGroupManagerUpdateInstanceConfigReq] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Inserts or updates per-instance configurations for
        the managed instance group. perInstanceConfig.name
        serves as a key used to distinguish whether to perform
        insert or patch.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_update_per_instance_configs():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagersClient()

                # Initialize request argument(s)
                request = compute_v1.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.update_per_instance_configs(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagers.UpdatePerInstanceConfigs.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request, should conform to RFC1035.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. It should conform to RFC1035.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_group_manager_update_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerUpdateInstanceConfigReq):
                The body resource for this request
                This corresponds to the ``region_instance_group_manager_update_instance_config_req_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation
                resource. Unlike
                ``update_per_instance_configs``, this
                unary variant does not wrap the result
                in an ``ExtendedOperation``; callers
                must check the operation's status
                themselves.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group_manager, region_instance_group_manager_update_instance_config_req_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest):
            request = compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if region_instance_group_manager_update_instance_config_req_resource is not None:
            request.region_instance_group_manager_update_instance_config_req_resource = region_instance_group_manager_update_instance_config_req_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_per_instance_configs]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group_manager", request.instance_group_manager),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
+ return response + + def update_per_instance_configs(self, + request: Optional[Union[compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + region_instance_group_manager_update_instance_config_req_resource: Optional[compute.RegionInstanceGroupManagerUpdateInstanceConfigReq] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Inserts or updates per-instance configurations for + the managed instance group. perInstanceConfig.name + serves as a key used to distinguish whether to perform + insert or patch. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update_per_instance_configs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagers.UpdatePerInstanceConfigs. 
+ See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request, should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. It should conform to RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_group_manager_update_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerUpdateInstanceConfigReq): + The body resource for this request + This corresponds to the ``region_instance_group_manager_update_instance_config_req_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, instance_group_manager, region_instance_group_manager_update_instance_config_req_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest): + request = compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if region_instance_group_manager_update_instance_config_req_resource is not None: + request.region_instance_group_manager_update_instance_config_req_resource = region_instance_group_manager_update_instance_config_req_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_per_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionInstanceGroupManagersClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionInstanceGroupManagersClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/pagers.py new file mode 100644 index 000000000..13cd25dd5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/pagers.py @@ -0,0 +1,254 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagerList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagerList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.RegionInstanceGroupManagerList], + request: compute.ListRegionInstanceGroupManagersRequest, + response: compute.RegionInstanceGroupManagerList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.RegionInstanceGroupManagerList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RegionInstanceGroupManagerList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceGroupManager]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListErrorsPager: + """A pager for iterating through ``list_errors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagersListErrorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListErrors`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagersListErrorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.RegionInstanceGroupManagersListErrorsResponse], + request: compute.ListErrorsRegionInstanceGroupManagersRequest, + response: compute.RegionInstanceGroupManagersListErrorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListErrorsRegionInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.RegionInstanceGroupManagersListErrorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListErrorsRegionInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RegionInstanceGroupManagersListErrorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceManagedByIgmError]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListManagedInstancesPager: + """A pager for iterating through ``list_managed_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagersListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``managed_instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListManagedInstances`` requests and continue to iterate + through the ``managed_instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagersListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.RegionInstanceGroupManagersListInstancesResponse], + request: compute.ListManagedInstancesRegionInstanceGroupManagersRequest, + response: compute.RegionInstanceGroupManagersListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListManagedInstancesRegionInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.RegionInstanceGroupManagersListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListManagedInstancesRegionInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RegionInstanceGroupManagersListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.ManagedInstance]: + for page in self.pages: + yield from page.managed_instances + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPerInstanceConfigsPager: + """A pager for iterating through ``list_per_instance_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagersListInstanceConfigsResp` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPerInstanceConfigs`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagersListInstanceConfigsResp` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.RegionInstanceGroupManagersListInstanceConfigsResp], + request: compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, + response: compute.RegionInstanceGroupManagersListInstanceConfigsResp, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListPerInstanceConfigsRegionInstanceGroupManagersRequest): + The initial request object. + response (google.cloud.compute_v1.types.RegionInstanceGroupManagersListInstanceConfigsResp): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RegionInstanceGroupManagersListInstanceConfigsResp]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.PerInstanceConfig]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/__init__.py new file mode 100644 index 000000000..85f0bdba6 --- 
/dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionInstanceGroupManagersTransport +from .rest import RegionInstanceGroupManagersRestTransport +from .rest import RegionInstanceGroupManagersRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstanceGroupManagersTransport]] +_transport_registry['rest'] = RegionInstanceGroupManagersRestTransport + +__all__ = ( + 'RegionInstanceGroupManagersTransport', + 'RegionInstanceGroupManagersRestTransport', + 'RegionInstanceGroupManagersRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/base.py new file mode 100644 index 000000000..1ae87fba2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/base.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionInstanceGroupManagersTransport(abc.ABC): + """Abstract transport class for RegionInstanceGroupManagers.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.abandon_instances: gapic_v1.method.wrap_method( + self.abandon_instances, + default_timeout=None, + client_info=client_info, + ), + self.apply_updates_to_instances: gapic_v1.method.wrap_method( + self.apply_updates_to_instances, + default_timeout=None, + client_info=client_info, + ), + self.create_instances: gapic_v1.method.wrap_method( + self.create_instances, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.delete_instances: gapic_v1.method.wrap_method( + self.delete_instances, + default_timeout=None, + client_info=client_info, + ), + self.delete_per_instance_configs: gapic_v1.method.wrap_method( + self.delete_per_instance_configs, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_errors: gapic_v1.method.wrap_method( + self.list_errors, 
+ default_timeout=None, + client_info=client_info, + ), + self.list_managed_instances: gapic_v1.method.wrap_method( + self.list_managed_instances, + default_timeout=None, + client_info=client_info, + ), + self.list_per_instance_configs: gapic_v1.method.wrap_method( + self.list_per_instance_configs, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.patch_per_instance_configs: gapic_v1.method.wrap_method( + self.patch_per_instance_configs, + default_timeout=None, + client_info=client_info, + ), + self.recreate_instances: gapic_v1.method.wrap_method( + self.recreate_instances, + default_timeout=None, + client_info=client_info, + ), + self.resize: gapic_v1.method.wrap_method( + self.resize, + default_timeout=None, + client_info=client_info, + ), + self.set_instance_template: gapic_v1.method.wrap_method( + self.set_instance_template, + default_timeout=None, + client_info=client_info, + ), + self.set_target_pools: gapic_v1.method.wrap_method( + self.set_target_pools, + default_timeout=None, + client_info=client_info, + ), + self.update_per_instance_configs: gapic_v1.method.wrap_method( + self.update_per_instance_configs, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def abandon_instances(self) -> Callable[ + [compute.AbandonInstancesRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def apply_updates_to_instances(self) -> Callable[ + [compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def create_instances(self) -> Callable[ + [compute.CreateInstancesRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instances(self) -> Callable[ + [compute.DeleteInstancesRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_per_instance_configs(self) -> Callable[ + [compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionInstanceGroupManagerRequest], + Union[ + compute.InstanceGroupManager, + Awaitable[compute.InstanceGroupManager] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionInstanceGroupManagersRequest], + Union[ + compute.RegionInstanceGroupManagerList, + Awaitable[compute.RegionInstanceGroupManagerList] + ]]: + raise NotImplementedError() + + 
@property + def list_errors(self) -> Callable[ + [compute.ListErrorsRegionInstanceGroupManagersRequest], + Union[ + compute.RegionInstanceGroupManagersListErrorsResponse, + Awaitable[compute.RegionInstanceGroupManagersListErrorsResponse] + ]]: + raise NotImplementedError() + + @property + def list_managed_instances(self) -> Callable[ + [compute.ListManagedInstancesRegionInstanceGroupManagersRequest], + Union[ + compute.RegionInstanceGroupManagersListInstancesResponse, + Awaitable[compute.RegionInstanceGroupManagersListInstancesResponse] + ]]: + raise NotImplementedError() + + @property + def list_per_instance_configs(self) -> Callable[ + [compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest], + Union[ + compute.RegionInstanceGroupManagersListInstanceConfigsResp, + Awaitable[compute.RegionInstanceGroupManagersListInstanceConfigsResp] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def patch_per_instance_configs(self) -> Callable[ + [compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def recreate_instances(self) -> Callable[ + [compute.RecreateInstancesRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def resize(self) -> Callable[ + [compute.ResizeRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_instance_template(self) -> Callable[ + [compute.SetInstanceTemplateRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def 
set_target_pools(self) -> Callable[ + [compute.SetTargetPoolsRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update_per_instance_configs(self) -> Callable[ + [compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionInstanceGroupManagersTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py new file mode 100644 index 000000000..bc1db0145 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py @@ -0,0 +1,2608 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionInstanceGroupManagersTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionInstanceGroupManagersRestInterceptor: + """Interceptor for RegionInstanceGroupManagers. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionInstanceGroupManagersRestTransport. + + .. 
code-block:: python + class MyCustomRegionInstanceGroupManagersInterceptor(RegionInstanceGroupManagersRestInterceptor): + def pre_abandon_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_abandon_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_apply_updates_to_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_apply_updates_to_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_per_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_per_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_errors(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_errors(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_managed_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_managed_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_per_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_per_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch_per_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch_per_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_recreate_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_recreate_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resize(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_resize(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_instance_template(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_set_instance_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_target_pools(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_target_pools(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_per_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_per_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionInstanceGroupManagersRestTransport(interceptor=MyCustomRegionInstanceGroupManagersInterceptor()) + client = RegionInstanceGroupManagersClient(transport=transport) + + + """ + def pre_abandon_instances(self, request: compute.AbandonInstancesRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AbandonInstancesRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for abandon_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_abandon_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for abandon_instances + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. 
+ """ + return response + def pre_apply_updates_to_instances(self, request: compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for apply_updates_to_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_apply_updates_to_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for apply_updates_to_instances + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_create_instances(self, request: compute.CreateInstancesRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.CreateInstancesRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_create_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for create_instances + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. 
+ """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_delete_instances(self, request: compute.DeleteInstancesRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteInstancesRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_delete_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete_instances + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_delete_per_instance_configs(self, request: compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_per_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_delete_per_instance_configs(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete_per_instance_configs + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_get(self, response: compute.InstanceGroupManager) -> compute.InstanceGroupManager: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. 
+ """ + return request, metadata + + def post_list(self, response: compute.RegionInstanceGroupManagerList) -> compute.RegionInstanceGroupManagerList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_list_errors(self, request: compute.ListErrorsRegionInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListErrorsRegionInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_errors + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_list_errors(self, response: compute.RegionInstanceGroupManagersListErrorsResponse) -> compute.RegionInstanceGroupManagersListErrorsResponse: + """Post-rpc interceptor for list_errors + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_list_managed_instances(self, request: compute.ListManagedInstancesRegionInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListManagedInstancesRegionInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_managed_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. 
+ """ + return request, metadata + + def post_list_managed_instances(self, response: compute.RegionInstanceGroupManagersListInstancesResponse) -> compute.RegionInstanceGroupManagersListInstancesResponse: + """Post-rpc interceptor for list_managed_instances + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_list_per_instance_configs(self, request: compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_per_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_list_per_instance_configs(self, response: compute.RegionInstanceGroupManagersListInstanceConfigsResp) -> compute.RegionInstanceGroupManagersListInstanceConfigsResp: + """Post-rpc interceptor for list_per_instance_configs + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. 
+ """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_patch_per_instance_configs(self, request: compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch_per_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_patch_per_instance_configs(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch_per_instance_configs + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_recreate_instances(self, request: compute.RecreateInstancesRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RecreateInstancesRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for recreate_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_recreate_instances(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for recreate_instances + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. 
+ """ + return response + def pre_resize(self, request: compute.ResizeRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ResizeRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resize + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_resize(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for resize + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_set_instance_template(self, request: compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_instance_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_set_instance_template(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_instance_template + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_set_target_pools(self, request: compute.SetTargetPoolsRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetTargetPoolsRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_target_pools + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. 
+ """ + return request, metadata + + def post_set_target_pools(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_target_pools + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + def pre_update_per_instance_configs(self, request: compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_per_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagers server. + """ + return request, metadata + + def post_update_per_instance_configs(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_per_instance_configs + + Override in a subclass to manipulate the response + after it is returned by the RegionInstanceGroupManagers server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionInstanceGroupManagersRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionInstanceGroupManagersRestInterceptor + + +class RegionInstanceGroupManagersRestTransport(RegionInstanceGroupManagersTransport): + """REST backend transport for RegionInstanceGroupManagers. + + The RegionInstanceGroupManagers API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionInstanceGroupManagersRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionInstanceGroupManagersRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AbandonInstances(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("AbandonInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AbandonInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the abandon instances method over HTTP. 
+ + Args: + request (~.compute.AbandonInstancesRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.AbandonInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/abandonInstances', + 'body': 'region_instance_group_managers_abandon_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_abandon_instances(request, metadata) + pb_request = compute.AbandonInstancesRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_abandon_instances(resp) + return resp + + class _ApplyUpdatesToInstances(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("ApplyUpdatesToInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the apply updates to + instances method over HTTP. + + Args: + request (~.compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.ApplyUpdatesToInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances', + 'body': 'region_instance_group_managers_apply_updates_request_resource', + }, + ] + request, metadata = self._interceptor.pre_apply_updates_to_instances(request, metadata) + pb_request = compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_apply_updates_to_instances(resp) + return resp + + class _CreateInstances(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("CreateInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.CreateInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the create instances method over HTTP. + + Args: + request (~.compute.CreateInstancesRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.CreateInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/createInstances', + 'body': 'region_instance_group_managers_create_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_create_instances(request, metadata) + pb_request = compute.CreateInstancesRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instances(resp) + return resp + + class _Delete(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.Delete. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _DeleteInstances(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("DeleteInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete instances method over HTTP. + + Args: + request (~.compute.DeleteInstancesRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.DeleteInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deleteInstances', + 'body': 'region_instance_group_managers_delete_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_delete_instances(request, metadata) + pb_request = compute.DeleteInstancesRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instances(resp) + return resp + + class _DeletePerInstanceConfigs(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("DeletePerInstanceConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete per instance + configs method over HTTP. + + Args: + request (~.compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.DeletePerInstanceConfigs. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs', + 'body': 'region_instance_group_manager_delete_instance_config_req_resource', + }, + ] + request, metadata = self._interceptor.pre_delete_per_instance_configs(request, metadata) + pb_request = compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_per_instance_configs(resp) + return resp + + class _Get(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroupManager: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceGroupManager: + Represents a Managed Instance Group + resource. An instance group is a + collection of VM instances that you can + manage as a single entity. For more + information, read Instance groups. For + zonal Managed Instance Group, use the + instanceGroupManagers resource. For + regional Managed Instance Group, use the + regionInstanceGroupManagers resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManager() + pb_resp = compute.InstanceGroupManager.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.Insert. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers', + 'body': 'instance_group_manager_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionInstanceGroupManagerList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionInstanceGroupManagersRequest): + The request object. A request message for + RegionInstanceGroupManagers.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.RegionInstanceGroupManagerList: + Contains a list of managed instance + groups. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionInstanceGroupManagerList() + pb_resp = compute.RegionInstanceGroupManagerList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListErrors(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("ListErrors") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListErrorsRegionInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionInstanceGroupManagersListErrorsResponse: + r"""Call the list errors method over HTTP. + + Args: + request (~.compute.ListErrorsRegionInstanceGroupManagersRequest): + The request object. A request message for + RegionInstanceGroupManagers.ListErrors. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.RegionInstanceGroupManagersListErrorsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listErrors', + }, + ] + request, metadata = self._interceptor.pre_list_errors(request, metadata) + pb_request = compute.ListErrorsRegionInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionInstanceGroupManagersListErrorsResponse() + pb_resp = compute.RegionInstanceGroupManagersListErrorsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_errors(resp) + return resp + + class _ListManagedInstances(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("ListManagedInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListManagedInstancesRegionInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionInstanceGroupManagersListInstancesResponse: + r"""Call the list managed instances method over HTTP. + + Args: + request (~.compute.ListManagedInstancesRegionInstanceGroupManagersRequest): + The request object. A request message for + RegionInstanceGroupManagers.ListManagedInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.RegionInstanceGroupManagersListInstancesResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listManagedInstances', + }, + ] + request, metadata = self._interceptor.pre_list_managed_instances(request, metadata) + pb_request = compute.ListManagedInstancesRegionInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionInstanceGroupManagersListInstancesResponse() + pb_resp = compute.RegionInstanceGroupManagersListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_managed_instances(resp) + return resp + + class _ListPerInstanceConfigs(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("ListPerInstanceConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionInstanceGroupManagersListInstanceConfigsResp: + r"""Call the list per instance configs method over HTTP. + + Args: + request (~.compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest): + The request object. A request message for + RegionInstanceGroupManagers.ListPerInstanceConfigs. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.RegionInstanceGroupManagersListInstanceConfigsResp: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs', + }, + ] + request, metadata = self._interceptor.pre_list_per_instance_configs(request, metadata) + pb_request = compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionInstanceGroupManagersListInstanceConfigsResp() + pb_resp = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_per_instance_configs(resp) + return resp + + class _Patch(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.Patch. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}', + 'body': 'instance_group_manager_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _PatchPerInstanceConfigs(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("PatchPerInstanceConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch per instance + configs method over HTTP. + + Args: + request (~.compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.PatchPerInstanceConfigs. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs', + 'body': 'region_instance_group_manager_patch_instance_config_req_resource', + }, + ] + request, metadata = self._interceptor.pre_patch_per_instance_configs(request, metadata) + pb_request = compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch_per_instance_configs(resp) + return resp + + class _RecreateInstances(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("RecreateInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RecreateInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the recreate instances method over HTTP. + + Args: + request (~.compute.RecreateInstancesRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.RecreateInstances. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/recreateInstances', + 'body': 'region_instance_group_managers_recreate_request_resource', + }, + ] + request, metadata = self._interceptor.pre_recreate_instances(request, metadata) + pb_request = compute.RecreateInstancesRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_recreate_instances(resp) + return resp + + class _Resize(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("Resize") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "size" : 0, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ResizeRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the resize method over HTTP. + + Args: + request (~.compute.ResizeRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.Resize. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resize', + }, + ] + request, metadata = self._interceptor.pre_resize(request, metadata) + pb_request = compute.ResizeRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resize(resp) + return resp + + class _SetInstanceTemplate(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("SetInstanceTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set instance template method over HTTP. + + Args: + request (~.compute.SetInstanceTemplateRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.SetInstanceTemplate. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate', + 'body': 'region_instance_group_managers_set_template_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_instance_template(request, metadata) + pb_request = compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_instance_template(resp) + return resp + + class _SetTargetPools(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("SetTargetPools") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetTargetPoolsRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set target pools method over HTTP. + + Args: + request (~.compute.SetTargetPoolsRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.SetTargetPools. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setTargetPools', + 'body': 'region_instance_group_managers_set_target_pools_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_target_pools(request, metadata) + pb_request = compute.SetTargetPoolsRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_target_pools(resp) + return resp + + class _UpdatePerInstanceConfigs(RegionInstanceGroupManagersRestStub): + def __hash__(self): + return hash("UpdatePerInstanceConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update per instance + configs method over HTTP. + + Args: + request (~.compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest): + The request object. A request message for + RegionInstanceGroupManagers.UpdatePerInstanceConfigs. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs', + 'body': 'region_instance_group_manager_update_instance_config_req_resource', + }, + ] + request, metadata = self._interceptor.pre_update_per_instance_configs(request, metadata) + pb_request = compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_per_instance_configs(resp) + return resp + + @property + def abandon_instances(self) -> Callable[ + [compute.AbandonInstancesRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AbandonInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def apply_updates_to_instances(self) -> Callable[ + [compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ApplyUpdatesToInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instances(self) -> Callable[ + [compute.CreateInstancesRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instances(self) -> Callable[ + [compute.DeleteInstancesRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_per_instance_configs(self) -> Callable[ + [compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePerInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionInstanceGroupManagerRequest], + compute.InstanceGroupManager]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionInstanceGroupManagersRequest], + compute.RegionInstanceGroupManagerList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_errors(self) -> Callable[ + [compute.ListErrorsRegionInstanceGroupManagersRequest], + compute.RegionInstanceGroupManagersListErrorsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListErrors(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_managed_instances(self) -> Callable[ + [compute.ListManagedInstancesRegionInstanceGroupManagersRequest], + compute.RegionInstanceGroupManagersListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListManagedInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_per_instance_configs(self) -> Callable[ + [compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest], + compute.RegionInstanceGroupManagersListInstanceConfigsResp]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPerInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch_per_instance_configs(self) -> Callable[ + [compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PatchPerInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def recreate_instances(self) -> Callable[ + [compute.RecreateInstancesRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RecreateInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def resize(self) -> Callable[ + [compute.ResizeRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Resize(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_instance_template(self) -> Callable[ + [compute.SetInstanceTemplateRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetInstanceTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_target_pools(self) -> Callable[ + [compute.SetTargetPoolsRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetTargetPools(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_per_instance_configs(self) -> Callable[ + [compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdatePerInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionInstanceGroupManagersRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/__init__.py new file mode 100644 index 000000000..04a64af04 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import RegionInstanceGroupsClient + +__all__ = ( + 'RegionInstanceGroupsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/client.py new file mode 100644 index 000000000..b999b9e83 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/client.py @@ -0,0 +1,1093 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_instance_groups import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionInstanceGroupsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionInstanceGroupsRestTransport + + +class RegionInstanceGroupsClientMeta(type): + """Metaclass for the RegionInstanceGroups client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstanceGroupsTransport]] + _transport_registry["rest"] = RegionInstanceGroupsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionInstanceGroupsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionInstanceGroupsClient(metaclass=RegionInstanceGroupsClientMeta): + """The RegionInstanceGroups API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstanceGroupsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstanceGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionInstanceGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionInstanceGroupsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionInstanceGroupsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region instance groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionInstanceGroupsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionInstanceGroupsTransport): + # transport is a RegionInstanceGroupsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get(self, + request: Optional[Union[compute.GetRegionInstanceGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstanceGroup: + r"""Returns the specified instance group resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionInstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionInstanceGroupRequest, dict]): + The request object. A request message for + RegionInstanceGroups.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + Name of the instance group resource + to return. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InstanceGroup: + Represents an Instance Group + resource. Instance Groups can be used to + configure a target for load balancing. + Instance groups can either be managed or + unmanaged. To create managed instance + groups, use the instanceGroupManager or + regionInstanceGroupManager resource + instead. 
Use zonal unmanaged instance + groups if you need to apply load + balancing to groups of heterogeneous + instances or if you need to manage the + instances yourself. You cannot create + regional unmanaged instance groups. For + more information, read Instance groups. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionInstanceGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionInstanceGroupRequest): + request = compute.GetRegionInstanceGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group is not None: + request.instance_group = instance_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_group", request.instance_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListRegionInstanceGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of instance group resources + contained within the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionInstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstanceGroupsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionInstanceGroupsRequest, dict]): + The request object. A request message for + RegionInstanceGroups.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_instance_groups.pagers.ListPager: + Contains a list of InstanceGroup + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionInstanceGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionInstanceGroupsRequest): + request = compute.ListRegionInstanceGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_instances(self, + request: Optional[Union[compute.ListInstancesRegionInstanceGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group: Optional[str] = None, + region_instance_groups_list_instances_request_resource: Optional[compute.RegionInstanceGroupsListInstancesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists the instances in the specified instance group + and displays information about the named ports. + Depending on the specified options, this method can list + all instances or only the instances that are running. + The orderBy query parameter is not supported. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstancesRegionInstanceGroupsRequest( + instance_group="instance_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInstancesRegionInstanceGroupsRequest, dict]): + The request object. A request message for + RegionInstanceGroups.ListInstances. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group (str): + Name of the regional instance group + for which we want to list the instances. + + This corresponds to the ``instance_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest): + The body resource for this request + This corresponds to the ``region_instance_groups_list_instances_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_instance_groups.pagers.ListInstancesPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_group, region_instance_groups_list_instances_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListInstancesRegionInstanceGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListInstancesRegionInstanceGroupsRequest): + request = compute.ListInstancesRegionInstanceGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group is not None: + request.instance_group = instance_group + if region_instance_groups_list_instances_request_resource is not None: + request.region_instance_groups_list_instances_request_resource = region_instance_groups_list_instances_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. 
        # Certain per-request fields are echoed into the routing header so the
        # backend can route the call correctly.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group", request.instance_group),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListInstancesPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def set_named_ports_unary(self,
            request: Optional[Union[compute.SetNamedPortsRegionInstanceGroupRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group: Optional[str] = None,
            region_instance_groups_set_named_ports_request_resource: Optional[compute.RegionInstanceGroupsSetNamedPortsRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Sets the named ports for the specified regional
        instance group.

        Unlike :meth:`set_named_ports`, this variant returns the raw
        ``compute.Operation`` without wrapping it in an extended
        operation that polls for completion.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_named_ports():
                # Create a client
                client = compute_v1.RegionInstanceGroupsClient()

                # Initialize request argument(s)
                request = compute_v1.SetNamedPortsRegionInstanceGroupRequest(
                    instance_group="instance_group_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.set_named_ports(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetNamedPortsRegionInstanceGroupRequest, dict]):
                The request object. A request message for
                RegionInstanceGroups.SetNamedPorts. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group (str):
                The name of the regional instance
                group where the named ports are updated.

                This corresponds to the ``instance_group`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_groups_set_named_ports_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsSetNamedPortsRequest):
                The body resource for this request
                This corresponds to the ``region_instance_groups_set_named_ports_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group, region_instance_groups_set_named_ports_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetNamedPortsRegionInstanceGroupRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetNamedPortsRegionInstanceGroupRequest):
            request = compute.SetNamedPortsRegionInstanceGroupRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if instance_group is not None:
                request.instance_group = instance_group
            if region_instance_groups_set_named_ports_request_resource is not None:
                request.region_instance_groups_set_named_ports_request_resource = region_instance_groups_set_named_ports_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_named_ports]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group", request.instance_group),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def set_named_ports(self,
            request: Optional[Union[compute.SetNamedPortsRegionInstanceGroupRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            instance_group: Optional[str] = None,
            region_instance_groups_set_named_ports_request_resource: Optional[compute.RegionInstanceGroupsSetNamedPortsRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Sets the named ports for the specified regional
        instance group.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_named_ports():
                # Create a client
                client = compute_v1.RegionInstanceGroupsClient()

                # Initialize request argument(s)
                request = compute_v1.SetNamedPortsRegionInstanceGroupRequest(
                    instance_group="instance_group_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.set_named_ports(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetNamedPortsRegionInstanceGroupRequest, dict]):
                The request object. A request message for
                RegionInstanceGroups.SetNamedPorts. See
                the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group (str):
                The name of the regional instance
                group where the named ports are updated.

                This corresponds to the ``instance_group`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_instance_groups_set_named_ports_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsSetNamedPortsRequest):
                The body resource for this request
                This corresponds to the ``region_instance_groups_set_named_ports_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, instance_group, region_instance_groups_set_named_ports_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetNamedPortsRegionInstanceGroupRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetNamedPortsRegionInstanceGroupRequest):
            request = compute.SetNamedPortsRegionInstanceGroupRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if instance_group is not None:
                request.instance_group = instance_group
            if region_instance_groups_set_named_ports_request_resource is not None:
                request.region_instance_groups_set_named_ports_request_resource = region_instance_groups_set_named_ports_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.set_named_ports]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("instance_group", request.instance_group),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Poll the returned Operation via the region operations service so the
        # caller gets an ExtendedOperation that can wait for completion.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def __enter__(self) -> "RegionInstanceGroupsClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


__all__ = (
    "RegionInstanceGroupsClient",
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/pagers.py
new file mode 100644
index 000000000..68a2c76fc
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/pagers.py
@@ -0,0 +1,136 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator

from google.cloud.compute_v1.types import compute


class ListPager:
    """A pager for iterating through ``list`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.compute_v1.types.RegionInstanceGroupList` object, and
    provides an ``__iter__`` method to iterate through its
    ``items`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``List`` requests and continue to iterate
    through the ``items`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.compute_v1.types.RegionInstanceGroupList`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., compute.RegionInstanceGroupList],
            request: compute.ListRegionInstanceGroupsRequest,
            response: compute.RegionInstanceGroupList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListRegionInstanceGroupsRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.RegionInstanceGroupList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation below never touches the
        # caller's object.
        self._request = compute.ListRegionInstanceGroupsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.RegionInstanceGroupList]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[compute.InstanceGroup]:
        for page in self.pages:
            yield from page.items

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)


class ListInstancesPager:
    """A pager for iterating through ``list_instances`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.compute_v1.types.RegionInstanceGroupsListInstances` object, and
    provides an ``__iter__`` method to iterate through its
    ``items`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``ListInstances`` requests and continue to iterate
    through the ``items`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.compute_v1.types.RegionInstanceGroupsListInstances`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., compute.RegionInstanceGroupsListInstances],
            request: compute.ListInstancesRegionInstanceGroupsRequest,
            response: compute.RegionInstanceGroupsListInstances,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListInstancesRegionInstanceGroupsRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.RegionInstanceGroupsListInstances):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation below never touches the
        # caller's object.
        self._request = compute.ListInstancesRegionInstanceGroupsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.RegionInstanceGroupsListInstances]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[compute.InstanceWithNamedPorts]:
        for page in self.pages:
            yield from page.items

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/__init__.py
new file mode 100644
index 000000000..78f3d2aa3
--- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionInstanceGroupsTransport +from .rest import RegionInstanceGroupsRestTransport +from .rest import RegionInstanceGroupsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstanceGroupsTransport]] +_transport_registry['rest'] = RegionInstanceGroupsRestTransport + +__all__ = ( + 'RegionInstanceGroupsTransport', + 'RegionInstanceGroupsRestTransport', + 'RegionInstanceGroupsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/base.py new file mode 100644 index 000000000..5708ec361 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/base.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionInstanceGroupsTransport(abc.ABC): + """Abstract transport class for RegionInstanceGroups.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_timeout=None, + client_info=client_info, + ), + self.set_named_ports: gapic_v1.method.wrap_method( + self.set_named_ports, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionInstanceGroupRequest], + Union[ + compute.InstanceGroup, + Awaitable[compute.InstanceGroup] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionInstanceGroupsRequest], + Union[ + compute.RegionInstanceGroupList, + Awaitable[compute.RegionInstanceGroupList] + ]]: + raise NotImplementedError() + + @property + def list_instances(self) -> Callable[ + [compute.ListInstancesRegionInstanceGroupsRequest], + Union[ + compute.RegionInstanceGroupsListInstances, + Awaitable[compute.RegionInstanceGroupsListInstances] + ]]: + raise NotImplementedError() + + @property + def set_named_ports(self) -> Callable[ + [compute.SetNamedPortsRegionInstanceGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionInstanceGroupsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py new file mode 100644 index 000000000..7f46891b8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py @@ -0,0 +1,674 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
# with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from google.auth.transport.requests import AuthorizedSession  # type: ignore
import json  # type: ignore
import grpc  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
from google.api_core import path_template
from google.api_core import gapic_v1

from google.protobuf import json_format
from requests import __version__ as requests_version
import dataclasses
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


from google.cloud.compute_v1.types import compute

from .base import RegionInstanceGroupsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)


class RegionInstanceGroupsRestInterceptor:
    """Interceptor for RegionInstanceGroups.

    Interceptors are used to manipulate requests, request metadata, and responses
    in arbitrary ways.
    Example use cases include:
    * Logging
    * Verifying requests according to service or custom semantics
    * Stripping extraneous information from responses

    These use cases and more can be enabled by injecting an
    instance of a custom subclass when constructing the RegionInstanceGroupsRestTransport.

    .. code-block:: python
        class MyCustomRegionInstanceGroupsInterceptor(RegionInstanceGroupsRestInterceptor):
            def pre_get(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_get(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_list(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_list(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_list_instances(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_list_instances(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_set_named_ports(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_set_named_ports(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = RegionInstanceGroupsRestTransport(interceptor=MyCustomRegionInstanceGroupsInterceptor())
        client = RegionInstanceGroupsClient(transport=transport)


    """
    def pre_get(self, request: compute.GetRegionInstanceGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionInstanceGroupRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionInstanceGroups server.
        """
        return request, metadata

    def post_get(self, response: compute.InstanceGroup) -> compute.InstanceGroup:
        """Post-rpc interceptor for get

        Override in a subclass to manipulate the response
        after it is returned by the RegionInstanceGroups server but before
        it is returned to user code.
        """
        return response

    def pre_list(self, request: compute.ListRegionInstanceGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionInstanceGroupsRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionInstanceGroups server.
        """
        return request, metadata

    def post_list(self, response: compute.RegionInstanceGroupList) -> compute.RegionInstanceGroupList:
        """Post-rpc interceptor for list

        Override in a subclass to manipulate the response
        after it is returned by the RegionInstanceGroups server but before
        it is returned to user code.
        """
        return response

    def pre_list_instances(self, request: compute.ListInstancesRegionInstanceGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListInstancesRegionInstanceGroupsRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list_instances

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionInstanceGroups server.
        """
        return request, metadata

    def post_list_instances(self, response: compute.RegionInstanceGroupsListInstances) -> compute.RegionInstanceGroupsListInstances:
        """Post-rpc interceptor for list_instances

        Override in a subclass to manipulate the response
        after it is returned by the RegionInstanceGroups server but before
        it is returned to user code.
        """
        return response

    def pre_set_named_ports(self, request: compute.SetNamedPortsRegionInstanceGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetNamedPortsRegionInstanceGroupRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for set_named_ports

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionInstanceGroups server.
        """
        return request, metadata

    def post_set_named_ports(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for set_named_ports

        Override in a subclass to manipulate the response
        after it is returned by the RegionInstanceGroups server but before
        it is returned to user code.
        """
        return response


@dataclasses.dataclass
class RegionInstanceGroupsRestStub:
    # Shared state handed to each per-RPC stub class below.
    _session: AuthorizedSession
    _host: str
    _interceptor: RegionInstanceGroupsRestInterceptor


class RegionInstanceGroupsRestTransport(RegionInstanceGroupsTransport):
    """REST backend transport for RegionInstanceGroups.

    The RegionInstanceGroups API.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends JSON representations of protocol buffers over HTTP/1.1

    NOTE: This REST transport functionality is currently in a beta
    state (preview). We welcome your feedback via an issue in this
    library's source repository. Thank you!
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionInstanceGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionInstanceGroupsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Get(RegionInstanceGroupsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionInstanceGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.InstanceGroup: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionInstanceGroupRequest): + The request object. A request message for + RegionInstanceGroups.Get. 
See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceGroup: + Represents an Instance Group + resource. Instance Groups can be used to + configure a target for load balancing. + Instance groups can either be managed or + unmanaged. To create managed instance + groups, use the instanceGroupManager or + regionInstanceGroupManager resource + instead. Use zonal unmanaged instance + groups if you need to apply load + balancing to groups of heterogeneous + instances or if you need to manage the + instances yourself. You cannot create + regional unmanaged instance groups. For + more information, read Instance groups. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionInstanceGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError 
exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroup() + pb_resp = compute.InstanceGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(RegionInstanceGroupsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionInstanceGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionInstanceGroupList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionInstanceGroupsRequest): + The request object. A request message for + RegionInstanceGroups.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.RegionInstanceGroupList: + Contains a list of InstanceGroup + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroups', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionInstanceGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionInstanceGroupList() + pb_resp = compute.RegionInstanceGroupList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListInstances(RegionInstanceGroupsRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListInstancesRegionInstanceGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionInstanceGroupsListInstances: + r"""Call the list instances method over HTTP. + + Args: + request (~.compute.ListInstancesRegionInstanceGroupsRequest): + The request object. A request message for + RegionInstanceGroups.ListInstances. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.RegionInstanceGroupsListInstances: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/listInstances', + 'body': 'region_instance_groups_list_instances_request_resource', + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = compute.ListInstancesRegionInstanceGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionInstanceGroupsListInstances() + pb_resp = compute.RegionInstanceGroupsListInstances.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _SetNamedPorts(RegionInstanceGroupsRestStub): + def __hash__(self): + return hash("SetNamedPorts") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetNamedPortsRegionInstanceGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set named ports method over HTTP. + + Args: + request (~.compute.SetNamedPortsRegionInstanceGroupRequest): + The request object. A request message for + RegionInstanceGroups.SetNamedPorts. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/setNamedPorts', + 'body': 'region_instance_groups_set_named_ports_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_named_ports(request, metadata) + pb_request = compute.SetNamedPortsRegionInstanceGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_named_ports(resp) + return resp + + @property + def get(self) -> Callable[ + [compute.GetRegionInstanceGroupRequest], + compute.InstanceGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionInstanceGroupsRequest], + compute.RegionInstanceGroupList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [compute.ListInstancesRegionInstanceGroupsRequest], + compute.RegionInstanceGroupsListInstances]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_named_ports(self) -> Callable[ + [compute.SetNamedPortsRegionInstanceGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetNamedPorts(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionInstanceGroupsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/__init__.py new file mode 100644 index 000000000..ee58c190d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionInstanceTemplatesClient + +__all__ = ( + 'RegionInstanceTemplatesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/client.py new file mode 100644 index 000000000..f3e06a1ab --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/client.py @@ -0,0 +1,1188 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_instance_templates import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionInstanceTemplatesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionInstanceTemplatesRestTransport + + +class RegionInstanceTemplatesClientMeta(type): + """Metaclass for the RegionInstanceTemplates client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstanceTemplatesTransport]] + _transport_registry["rest"] = RegionInstanceTemplatesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionInstanceTemplatesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionInstanceTemplatesClient(metaclass=RegionInstanceTemplatesClientMeta): + """The RegionInstanceTemplates API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstanceTemplatesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstanceTemplatesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionInstanceTemplatesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionInstanceTemplatesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionInstanceTemplatesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region instance templates client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionInstanceTemplatesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionInstanceTemplatesTransport): + # transport is a RegionInstanceTemplatesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified instance template. Deleting an + instance template is permanent and cannot be undone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionInstanceTemplateRequest, dict]): + The request object. A request message for + RegionInstanceTemplates.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template (str): + The name of the instance template to + delete. + + This corresponds to the ``instance_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, instance_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionInstanceTemplateRequest): + request = compute.DeleteRegionInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_template is not None: + request.instance_template = instance_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_template", request.instance_template), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified instance template. 
Deleting an + instance template is permanent and cannot be undone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionInstanceTemplateRequest, dict]): + The request object. A request message for + RegionInstanceTemplates.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template (str): + The name of the instance template to + delete. + + This corresponds to the ``instance_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionInstanceTemplateRequest): + request = compute.DeleteRegionInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_template is not None: + request.instance_template = instance_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_template", request.instance_template), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_template: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstanceTemplate: + r"""Returns the specified instance template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionInstanceTemplateRequest, dict]): + The request object. A request message for + RegionInstanceTemplates.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template (str): + The name of the instance template. + This corresponds to the ``instance_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.InstanceTemplate: + Represents an Instance Template + resource. You can use instance templates + to create VM instances and managed + instance groups. For more information, + read Instance Templates. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionInstanceTemplateRequest): + request = compute.GetRegionInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_template is not None: + request.instance_template = instance_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("instance_template", request.instance_template), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_template_resource: Optional[compute.InstanceTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates an instance template in the specified project + and region using the global instance template whose URL + is included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstanceTemplateRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionInstanceTemplateRequest, dict]): + The request object. A request message for + RegionInstanceTemplates.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template_resource (google.cloud.compute_v1.types.InstanceTemplate): + The body resource for this request + This corresponds to the ``instance_template_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, instance_template_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionInstanceTemplateRequest): + request = compute.InsertRegionInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_template_resource is not None: + request.instance_template_resource = instance_template_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionInstanceTemplateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_template_resource: Optional[compute.InstanceTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates an instance template in the specified project + and region using the global instance template whose URL + is included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstanceTemplateRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionInstanceTemplateRequest, dict]): + The request object. A request message for + RegionInstanceTemplates.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_template_resource (google.cloud.compute_v1.types.InstanceTemplate): + The body resource for this request + This corresponds to the ``instance_template_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, instance_template_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionInstanceTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionInstanceTemplateRequest): + request = compute.InsertRegionInstanceTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_template_resource is not None: + request.instance_template_resource = instance_template_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionInstanceTemplatesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of instance templates that are + contained within the specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstanceTemplatesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionInstanceTemplatesRequest, dict]): + The request object. A request message for + RegionInstanceTemplates.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the regions for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_instance_templates.pagers.ListPager: + A list of instance templates. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionInstanceTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionInstanceTemplatesRequest): + request = compute.ListRegionInstanceTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionInstanceTemplatesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionInstanceTemplatesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/pagers.py new file mode 100644 index 000000000..c1c7c924a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceTemplateList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.InstanceTemplateList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.InstanceTemplateList], + request: compute.ListRegionInstanceTemplatesRequest, + response: compute.InstanceTemplateList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionInstanceTemplatesRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceTemplateList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionInstanceTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceTemplateList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceTemplate]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/transports/__init__.py new file mode 100644 index 000000000..7d97d48e5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionInstanceTemplatesTransport +from .rest import RegionInstanceTemplatesRestTransport +from .rest import RegionInstanceTemplatesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstanceTemplatesTransport]] +_transport_registry['rest'] = RegionInstanceTemplatesRestTransport + +__all__ = ( + 'RegionInstanceTemplatesTransport', + 'RegionInstanceTemplatesRestTransport', + 'RegionInstanceTemplatesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/transports/base.py new file mode 100644 index 000000000..7115537cd --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instance_templates/transports/base.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# Abstract transport for the RegionInstanceTemplates service (generated code).
import abc
# FIX: `Any` was used below (Dict[str, Any]) but never imported, which is a
# static error (F821 / mypy failure) in the generated file.
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute
from google.cloud.compute_v1.services import region_operations

# Default client metadata (user-agent) carrying the package's GAPIC version.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class RegionInstanceTemplatesTransport(abc.ABC):
    """Abstract transport class for RegionInstanceTemplates.

    Concrete transports (currently only REST) subclass this and implement the
    ``delete`` / ``get`` / ``insert`` / ``list`` method properties.
    """

    # OAuth scopes requested when none are supplied by the caller.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The intended audience for GDC-H
                credentials; defaults to ``host`` when not provided.
        """
        # Lazily-populated cache of extended-operations helper clients,
        # keyed by service name (see `_region_operations_client`).
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                                credentials_file,
                                **scopes_kwargs,
                                quota_project_id=quota_project_id
                            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods so per-call overhead (retry/timeout
        # policy, user-agent metadata) is applied once, up front.
        self._wrapped_methods = {
            self.delete: gapic_v1.method.wrap_method(
                self.delete,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.insert: gapic_v1.method.wrap_method(
                self.insert,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def delete(self) -> Callable[
            [compute.DeleteRegionInstanceTemplateRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetRegionInstanceTemplateRequest],
            Union[
                compute.InstanceTemplate,
                Awaitable[compute.InstanceTemplate]
            ]]:
        raise NotImplementedError()

    @property
    def insert(self) -> Callable[
            [compute.InsertRegionInstanceTemplateRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListRegionInstanceTemplatesRequest],
            Union[
                compute.InstanceTemplateList,
                Awaitable[compute.InstanceTemplateList]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()

    @property
    def _region_operations_client(self) -> region_operations.RegionOperationsClient:
        # Cached helper client used to poll extended (long-running) operations.
        ex_op_service = self._extended_operations_services.get("region_operations")
        if not ex_op_service:
            ex_op_service = region_operations.RegionOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["region_operations"] = ex_op_service

        return ex_op_service


__all__ = (
    'RegionInstanceTemplatesTransport',
)
# REST transport for the RegionInstanceTemplates service (generated code).
from google.auth.transport.requests import AuthorizedSession  # type: ignore
import json  # type: ignore
import grpc  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
from google.api_core import path_template
from google.api_core import gapic_v1

from google.protobuf import json_format
from requests import __version__ as requests_version
import dataclasses
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


from google.cloud.compute_v1.types import compute

from .base import RegionInstanceTemplatesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO


# Client metadata advertising the REST (requests) stack instead of gRPC.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)


class RegionInstanceTemplatesRestInterceptor:
    """Interceptor for RegionInstanceTemplates.

    Interceptors are used to manipulate requests, request metadata, and responses
    in arbitrary ways.
    Example use cases include:
    * Logging
    * Verifying requests according to service or custom semantics
    * Stripping extraneous information from responses

    These use cases and more can be enabled by injecting an
    instance of a custom subclass when constructing the RegionInstanceTemplatesRestTransport.

    .. code-block:: python
        class MyCustomRegionInstanceTemplatesInterceptor(RegionInstanceTemplatesRestInterceptor):
            def pre_delete(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_delete(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = RegionInstanceTemplatesRestTransport(interceptor=MyCustomRegionInstanceTemplatesInterceptor())
        client = RegionInstanceTemplatesClient(transport=transport)
    """
    def pre_delete(self, request: compute.DeleteRegionInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionInstanceTemplateRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for delete

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionInstanceTemplates server.
        """
        return request, metadata

    def post_delete(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for delete

        Override in a subclass to manipulate the response
        after it is returned by the RegionInstanceTemplates server but before
        it is returned to user code.
        """
        return response

    def pre_get(self, request: compute.GetRegionInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionInstanceTemplateRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionInstanceTemplates server.
        """
        return request, metadata

    def post_get(self, response: compute.InstanceTemplate) -> compute.InstanceTemplate:
        """Post-rpc interceptor for get

        Override in a subclass to manipulate the response
        after it is returned by the RegionInstanceTemplates server but before
        it is returned to user code.
        """
        return response

    def pre_insert(self, request: compute.InsertRegionInstanceTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionInstanceTemplateRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for insert

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionInstanceTemplates server.
        """
        return request, metadata

    def post_insert(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for insert

        Override in a subclass to manipulate the response
        after it is returned by the RegionInstanceTemplates server but before
        it is returned to user code.
        """
        return response

    def pre_list(self, request: compute.ListRegionInstanceTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionInstanceTemplatesRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the RegionInstanceTemplates server.
        """
        return request, metadata

    def post_list(self, response: compute.InstanceTemplateList) -> compute.InstanceTemplateList:
        """Post-rpc interceptor for list

        Override in a subclass to manipulate the response
        after it is returned by the RegionInstanceTemplates server but before
        it is returned to user code.
        """
        return response


@dataclasses.dataclass
class RegionInstanceTemplatesRestStub:
    # Shared state handed to each per-RPC callable stub below.
    _session: AuthorizedSession
    _host: str
    _interceptor: RegionInstanceTemplatesRestInterceptor


class RegionInstanceTemplatesRestTransport(RegionInstanceTemplatesTransport):
    """REST backend transport for RegionInstanceTemplates.

    The RegionInstanceTemplates API.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends JSON representations of protocol buffers over HTTP/1.1

    NOTE: This REST transport functionality is currently in a beta
    state (preview). We welcome your feedback via an issue in this
    library's source repository. Thank you!
    """

    def __init__(self, *,
            host: str = 'compute.googleapis.com',
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            client_cert_source_for_mtls: Optional[Callable[[
                ], Tuple[bytes, bytes]]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            url_scheme: str = 'https',
            interceptor: Optional[RegionInstanceTemplatesRestInterceptor] = None,
            api_audience: Optional[str] = None,
            ) -> None:
        """Instantiate the transport.

        NOTE: This REST transport functionality is currently in a beta
        state (preview). We welcome your feedback via a GitHub issue in
        this library's repository. Thank you!

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
                certificate to configure mutual TLS HTTP channel. It is ignored
                if ``channel`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you are developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            url_scheme: the protocol scheme for the API endpoint.  Normally
                "https", but for testing or local servers,
                "http" can be specified.
            interceptor: optional hook object for inspecting/mutating each RPC.
            api_audience: intended audience for GDC-H credentials.
        """
        # Run the base constructor
        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
        # credentials object
        # FIX: the named groups below had been garbled to "(?P" with no
        # group names, which is an invalid regex (re.error at import of any
        # client) and made url_match_items["scheme"] unreachable. Restored
        # per the groupdict() lookup on the following lines.
        maybe_url_match = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        if maybe_url_match is None:
            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER

        url_match_items = maybe_url_match.groupdict()

        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host

        super().__init__(
            host=host,
            credentials=credentials,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience
        )
        self._session = AuthorizedSession(
            self._credentials, default_host=self.DEFAULT_HOST)
        if client_cert_source_for_mtls:
            self._session.configure_mtls_channel(client_cert_source_for_mtls)
        self._interceptor = interceptor or RegionInstanceTemplatesRestInterceptor()
        self._prep_wrapped_messages(client_info)

    class _Delete(RegionInstanceTemplatesRestStub):
        def __hash__(self):
            return hash("Delete")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.DeleteRegionInstanceTemplateRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the delete method over HTTP.

            Args:
                request (~.compute.DeleteRegionInstanceTemplateRequest):
                    The request object. A request message for
                    RegionInstanceTemplates.Delete. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: global, regional and
                    zonal. You can use an operation resource to manage
                    asynchronous API requests. For more information, read
                    Handling API responses.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'delete',
                'uri': '/compute/v1/projects/{project}/regions/{region}/instanceTemplates/{instance_template}',
            },
            ]
            request, metadata = self._interceptor.pre_delete(request, metadata)
            pb_request = compute.DeleteRegionInstanceTemplateRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_delete(resp)
            return resp

    class _Get(RegionInstanceTemplatesRestStub):
        def __hash__(self):
            return hash("Get")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetRegionInstanceTemplateRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.InstanceTemplate:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetRegionInstanceTemplateRequest):
                    The request object. A request message for
                    RegionInstanceTemplates.Get. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.InstanceTemplate:
                    Represents an Instance Template
                resource. You can use instance templates
                to create VM instances and managed
                instance groups. For more information,
                read Instance Templates.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/regions/{region}/instanceTemplates/{instance_template}',
            },
            ]
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetRegionInstanceTemplateRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.InstanceTemplate()
            pb_resp = compute.InstanceTemplate.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp

    class _Insert(RegionInstanceTemplatesRestStub):
        def __hash__(self):
            return hash("Insert")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.InsertRegionInstanceTemplateRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the insert method over HTTP.

            Args:
                request (~.compute.InsertRegionInstanceTemplateRequest):
                    The request object. A request message for
                    RegionInstanceTemplates.Insert. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: global, regional and
                    zonal. You can use an operation resource to manage
                    asynchronous API requests. For more information, read
                    Handling API responses.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/regions/{region}/instanceTemplates',
                'body': 'instance_template_resource',
            },
            ]
            request, metadata = self._interceptor.pre_insert(request, metadata)
            pb_request = compute.InsertRegionInstanceTemplateRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_insert(resp)
            return resp

    class _List(RegionInstanceTemplatesRestStub):
        def __hash__(self):
            return hash("List")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.ListRegionInstanceTemplatesRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.InstanceTemplateList:
            r"""Call the list method over HTTP.

            Args:
                request (~.compute.ListRegionInstanceTemplatesRequest):
                    The request object. A request message for
                    RegionInstanceTemplates.List. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.InstanceTemplateList:
                    A list of instance templates.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/regions/{region}/instanceTemplates',
            },
            ]
            request, metadata = self._interceptor.pre_list(request, metadata)
            pb_request = compute.ListRegionInstanceTemplatesRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.InstanceTemplateList()
            pb_resp = compute.InstanceTemplateList.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list(resp)
            return resp

    @property
    def delete(self) -> Callable[
            [compute.DeleteRegionInstanceTemplateRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Delete(self._session, self._host, self._interceptor) # type: ignore

    @property
    def get(self) -> Callable[
            [compute.GetRegionInstanceTemplateRequest],
            compute.InstanceTemplate]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor) # type: ignore

    @property
    def insert(self) -> Callable[
            [compute.InsertRegionInstanceTemplateRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Insert(self._session, self._host, self._interceptor) # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListRegionInstanceTemplatesRequest],
            compute.InstanceTemplateList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor) # type: ignore

    @property
    def kind(self) -> str:
        return "rest"

    def close(self):
        self._session.close()


__all__=(
    'RegionInstanceTemplatesRestTransport',
)
# Public entry point for the RegionInstances service package: re-export the
# synchronous client so callers can import it from the package root.
from .client import RegionInstancesClient

__all__ = (
    'RegionInstancesClient',
)
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.types import compute +from .transports.base import RegionInstancesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionInstancesRestTransport + + +class RegionInstancesClientMeta(type): + """Metaclass for the RegionInstances client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstancesTransport]] + _transport_registry["rest"] = RegionInstancesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionInstancesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionInstancesClient(metaclass=RegionInstancesClientMeta): + """The RegionInstances API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstancesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstancesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionInstancesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionInstancesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified 
organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionInstancesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region instances client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionInstancesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionInstancesTransport): + # transport is a RegionInstancesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def bulk_insert_unary(self, + request: Optional[Union[compute.BulkInsertRegionInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + bulk_insert_instance_resource_resource: Optional[compute.BulkInsertInstanceResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates multiple instances in a given region. Count + specifies the number of instances to create. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_bulk_insert(): + # Create a client + client = compute_v1.RegionInstancesClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertRegionInstanceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.BulkInsertRegionInstanceRequest, dict]): + The request object. A request message for + RegionInstances.BulkInsert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + bulk_insert_instance_resource_resource (google.cloud.compute_v1.types.BulkInsertInstanceResource): + The body resource for this request + This corresponds to the ``bulk_insert_instance_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, bulk_insert_instance_resource_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.BulkInsertRegionInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.BulkInsertRegionInstanceRequest):
            request = compute.BulkInsertRegionInstanceRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if bulk_insert_instance_resource_resource is not None:
                request.bulk_insert_instance_resource_resource = bulk_insert_instance_resource_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.bulk_insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # NOTE(review): project/region become x-goog-request-params routing
        # headers so the backend can route the call to the right region.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
+ return response + + def bulk_insert(self, + request: Optional[Union[compute.BulkInsertRegionInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + bulk_insert_instance_resource_resource: Optional[compute.BulkInsertInstanceResource] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates multiple instances in a given region. Count + specifies the number of instances to create. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_bulk_insert(): + # Create a client + client = compute_v1.RegionInstancesClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertRegionInstanceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.BulkInsertRegionInstanceRequest, dict]): + The request object. A request message for + RegionInstances.BulkInsert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ bulk_insert_instance_resource_resource (google.cloud.compute_v1.types.BulkInsertInstanceResource): + The body resource for this request + This corresponds to the ``bulk_insert_instance_resource_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, bulk_insert_instance_resource_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.BulkInsertRegionInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.BulkInsertRegionInstanceRequest): + request = compute.BulkInsertRegionInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if bulk_insert_instance_resource_resource is not None: + request.bulk_insert_instance_resource_resource = bulk_insert_instance_resource_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
        rpc = self._transport._wrapped_methods[self._transport.bulk_insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a poller for the returned Operation using the region-scoped
        # operations client, so the caller receives an ExtendedOperation that
        # can wait for the bulk-insert to complete.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def __enter__(self) -> "RegionInstancesClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionInstancesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/__init__.py new file mode 100644 index 000000000..5a03f91fb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionInstancesTransport +from .rest import RegionInstancesRestTransport +from .rest import RegionInstancesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstancesTransport]] +_transport_registry['rest'] = RegionInstancesRestTransport + +__all__ = ( + 'RegionInstancesTransport', + 'RegionInstancesRestTransport', + 'RegionInstancesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/base.py new file mode 100644 index 000000000..a06c76ba9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/base.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import abc
# NOTE(fix): added "Any" to this import. It is used below in the evaluated
# attribute annotation `Dict[str, Any]`; annotations on attribute targets are
# evaluated at runtime (PEP 526), so without this import constructing any
# transport raises NameError.
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute
from google.cloud.compute_v1.services import region_operations

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class RegionInstancesTransport(abc.ABC):
    """Abstract transport class for RegionInstances."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        # Lazily-populated cache of extended-operations service clients
        # (see the _region_operations_client property below).
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.bulk_insert: gapic_v1.method.wrap_method( + self.bulk_insert, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def bulk_insert(self) -> Callable[ + [compute.BulkInsertRegionInstanceRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionInstancesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/rest.py new file mode 100644 index 000000000..a4d5b29a7 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_instances/transports/rest.py @@ -0,0 +1,319 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionInstancesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionInstancesRestInterceptor: + """Interceptor for RegionInstances. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionInstancesRestTransport. + + .. code-block:: python + class MyCustomRegionInstancesInterceptor(RegionInstancesRestInterceptor): + def pre_bulk_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_bulk_insert(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionInstancesRestTransport(interceptor=MyCustomRegionInstancesInterceptor()) + client = RegionInstancesClient(transport=transport) + + + """ + def pre_bulk_insert(self, request: compute.BulkInsertRegionInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.BulkInsertRegionInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for bulk_insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstances server. + """ + return request, metadata + + def post_bulk_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for bulk_insert + + Override in a subclass to manipulate the response + after it is returned by the RegionInstances server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionInstancesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionInstancesRestInterceptor + + +class RegionInstancesRestTransport(RegionInstancesTransport): + """REST backend transport for RegionInstances. + + The RegionInstances API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionInstancesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionInstancesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BulkInsert(RegionInstancesRestStub): + def __hash__(self): + return hash("BulkInsert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.BulkInsertRegionInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout:
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the bulk insert method over HTTP. + + Args: + request (~.compute.BulkInsertRegionInstanceRequest): + The request object. A request message for + RegionInstances.BulkInsert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/instances/bulkInsert', + 'body': 'bulk_insert_instance_resource_resource', + }, + ] + request, metadata = self._interceptor.pre_bulk_insert(request, metadata) + pb_request = compute.BulkInsertRegionInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_bulk_insert(resp) + return resp + + @property + def bulk_insert(self) -> Callable[ + [compute.BulkInsertRegionInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BulkInsert(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionInstancesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/__init__.py new file mode 100644 index 000000000..0f30da98c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionNetworkEndpointGroupsClient + +__all__ = ( + 'RegionNetworkEndpointGroupsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py new file mode 100644 index 000000000..3c36b3396 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py @@ -0,0 +1,1205 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_network_endpoint_groups import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionNetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionNetworkEndpointGroupsRestTransport + + +class RegionNetworkEndpointGroupsClientMeta(type): + """Metaclass for the RegionNetworkEndpointGroups client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionNetworkEndpointGroupsTransport]] + _transport_registry["rest"] = RegionNetworkEndpointGroupsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionNetworkEndpointGroupsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionNetworkEndpointGroupsClient(metaclass=RegionNetworkEndpointGroupsClientMeta): + """The RegionNetworkEndpointGroups API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionNetworkEndpointGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionNetworkEndpointGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionNetworkEndpointGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionNetworkEndpointGroupsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionNetworkEndpointGroupsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region network endpoint groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionNetworkEndpointGroupsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionNetworkEndpointGroupsTransport): + # transport is a RegionNetworkEndpointGroupsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified network endpoint group. Note + that the NEG cannot be deleted if it is configured as a + backend of a backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + network endpoint group is located. It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group to delete. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionNetworkEndpointGroupRequest): + request = compute.DeleteRegionNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified network endpoint group. Note + that the NEG cannot be deleted if it is configured as a + backend of a backend service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + network endpoint group is located. 
It + should comply with RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group to delete. It should comply with + RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionNetworkEndpointGroupRequest): + request = compute.DeleteRegionNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetRegionNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.NetworkEndpointGroup: + r"""Returns the specified network endpoint group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionNetworkEndpointGroupRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + network endpoint group is located. It + should comply with RFC1035. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group (str): + The name of the network endpoint + group. It should comply with RFC1035. + + This corresponds to the ``network_endpoint_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.NetworkEndpointGroup: + Represents a collection of network + endpoints. A network endpoint group + (NEG) defines how a set of endpoints + should be reached, whether they are + reachable, and where they are located. + For more information about using NEGs, + see Setting up external HTTP(S) Load + Balancing with internet NEGs, Setting up + zonal NEGs, or Setting up external + HTTP(S) Load Balancing with serverless + NEGs. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_endpoint_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionNetworkEndpointGroupRequest): + request = compute.GetRegionNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group is not None: + request.network_endpoint_group = network_endpoint_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("network_endpoint_group", request.network_endpoint_group), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group_resource: Optional[compute.NetworkEndpointGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a network endpoint group in the specified + project using the parameters that are included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_insert():
+                # Create a client
+                client = compute_v1.RegionNetworkEndpointGroupsClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.InsertRegionNetworkEndpointGroupRequest(
+                    project="project_value",
+                    region="region_value",
+                )
+
+                # Make the request
+                response = client.insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest, dict]):
+                The request object. A request message for
+                RegionNetworkEndpointGroups.Insert. See
+                the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            region (str):
+                The name of the region where you want
+                to create the network endpoint group. It
+                should comply with RFC1035.
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup):
+                The body resource for this request
+                This corresponds to the ``network_endpoint_group_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                The raw Operation response. This ``*_unary`` variant does not
+                wrap the result in an extended long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_endpoint_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionNetworkEndpointGroupRequest): + request = compute.InsertRegionNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group_resource is not None: + request.network_endpoint_group_resource = network_endpoint_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertRegionNetworkEndpointGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network_endpoint_group_resource: Optional[compute.NetworkEndpointGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a network endpoint group in the specified + project using the parameters that are included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionNetworkEndpointGroupRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.Insert. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where you want + to create the network endpoint group. It + should comply with RFC1035. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup): + The body resource for this request + This corresponds to the ``network_endpoint_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, network_endpoint_group_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionNetworkEndpointGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionNetworkEndpointGroupRequest): + request = compute.InsertRegionNetworkEndpointGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if network_endpoint_group_resource is not None: + request.network_endpoint_group_resource = network_endpoint_group_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListRegionNetworkEndpointGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of regional network endpoint + groups available to the specified project in the given + region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionNetworkEndpointGroupsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionNetworkEndpointGroupsRequest, dict]): + The request object. A request message for + RegionNetworkEndpointGroups.List. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region where the + network endpoint group is located. It + should comply with RFC1035. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_network_endpoint_groups.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionNetworkEndpointGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionNetworkEndpointGroupsRequest): + request = compute.ListRegionNetworkEndpointGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionNetworkEndpointGroupsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionNetworkEndpointGroupsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py new file mode 100644 index 000000000..ad76dffd5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator

+from google.cloud.compute_v1.types import compute
+
+
+class ListPager:
+    """A pager for iterating through ``list`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.compute_v1.types.NetworkEndpointGroupList` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``items`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``List`` requests and continue to iterate
+    through the ``items`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.compute_v1.types.NetworkEndpointGroupList`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., compute.NetworkEndpointGroupList],
+            request: compute.ListRegionNetworkEndpointGroupsRequest,
+            response: compute.NetworkEndpointGroupList,
+            *,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.compute_v1.types.ListRegionNetworkEndpointGroupsRequest):
+                The initial request object.
+            response (google.cloud.compute_v1.types.NetworkEndpointGroupList):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = compute.ListRegionNetworkEndpointGroupsRequest(request)  # copy so page_token mutation never touches the caller's request
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)  # proxy unknown attributes to the most recent response
+
+    @property
+    def pages(self) -> Iterator[compute.NetworkEndpointGroupList]:
+        yield self._response  # first page is the response we already hold; later pages fetched lazily
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[compute.NetworkEndpointGroup]:
+        for page in self.pages:
+            yield from page.items  # flatten pages into individual NetworkEndpointGroup items
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/__init__.py
new file mode 100644
index 000000000..904bdf8ad
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/__init__.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import RegionNetworkEndpointGroupsTransport
+from .rest import RegionNetworkEndpointGroupsRestTransport
+from .rest import RegionNetworkEndpointGroupsRestInterceptor
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[RegionNetworkEndpointGroupsTransport]]
+_transport_registry['rest'] = RegionNetworkEndpointGroupsRestTransport  # REST is the only transport generated for this service
+
+__all__ = (
+    'RegionNetworkEndpointGroupsTransport',
+    'RegionNetworkEndpointGroupsRestTransport',
+    'RegionNetworkEndpointGroupsRestInterceptor',
+)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py
new file mode 100644
index 000000000..8dbd4949a
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionNetworkEndpointGroupsTransport(abc.ABC): + """Abstract transport class for RegionNetworkEndpointGroups.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionNetworkEndpointGroupRequest], + Union[ + compute.NetworkEndpointGroup, + Awaitable[compute.NetworkEndpointGroup] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionNetworkEndpointGroupRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionNetworkEndpointGroupsRequest], + Union[ + compute.NetworkEndpointGroupList, + Awaitable[compute.NetworkEndpointGroupList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = 
region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionNetworkEndpointGroupsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py new file mode 100644 index 000000000..7c3b76ec8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py @@ -0,0 +1,675 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionNetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionNetworkEndpointGroupsRestInterceptor: + """Interceptor for RegionNetworkEndpointGroups. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionNetworkEndpointGroupsRestTransport. + + .. 
code-block:: python + class MyCustomRegionNetworkEndpointGroupsInterceptor(RegionNetworkEndpointGroupsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionNetworkEndpointGroupsRestTransport(interceptor=MyCustomRegionNetworkEndpointGroupsInterceptor()) + client = RegionNetworkEndpointGroupsClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkEndpointGroups server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkEndpointGroups server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetRegionNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkEndpointGroups server. + """ + return request, metadata + + def post_get(self, response: compute.NetworkEndpointGroup) -> compute.NetworkEndpointGroup: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionNetworkEndpointGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionNetworkEndpointGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkEndpointGroups server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionNetworkEndpointGroupsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionNetworkEndpointGroupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkEndpointGroups server. 
+ """ + return request, metadata + + def post_list(self, response: compute.NetworkEndpointGroupList) -> compute.NetworkEndpointGroupList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkEndpointGroups server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionNetworkEndpointGroupsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionNetworkEndpointGroupsRestInterceptor + + +class RegionNetworkEndpointGroupsRestTransport(RegionNetworkEndpointGroupsTransport): + """REST backend transport for RegionNetworkEndpointGroups. + + The RegionNetworkEndpointGroups API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionNetworkEndpointGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionNetworkEndpointGroupsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionNetworkEndpointGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionNetworkEndpointGroupRequest): + The request object. A request message for + RegionNetworkEndpointGroups.Delete. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionNetworkEndpointGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEndpointGroup: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionNetworkEndpointGroupRequest): + The request object. A request message for + RegionNetworkEndpointGroups.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NetworkEndpointGroup: + Represents a collection of network + endpoints. A network endpoint group + (NEG) defines how a set of endpoints + should be reached, whether they are + reachable, and where they are located. + For more information about using NEGs, + see Setting up external HTTP(S) Load + Balancing with internet NEGs, Setting up + zonal NEGs, or Setting up external + HTTP(S) Load Balancing with serverless + NEGs. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEndpointGroup() + pb_resp = compute.NetworkEndpointGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionNetworkEndpointGroupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionNetworkEndpointGroupRequest): + The request object. A request message for + RegionNetworkEndpointGroups.Insert. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups', + 'body': 'network_endpoint_group_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionNetworkEndpointGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionNetworkEndpointGroupsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionNetworkEndpointGroupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NetworkEndpointGroupList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionNetworkEndpointGroupsRequest): + The request object. A request message for + RegionNetworkEndpointGroups.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NetworkEndpointGroupList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionNetworkEndpointGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkEndpointGroupList() + pb_resp = compute.NetworkEndpointGroupList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionNetworkEndpointGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionNetworkEndpointGroupRequest], + compute.NetworkEndpointGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionNetworkEndpointGroupRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionNetworkEndpointGroupsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/__init__.py new file mode 100644 index 000000000..79122f972 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionNetworkFirewallPoliciesClient + +__all__ = ( + 'RegionNetworkFirewallPoliciesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/client.py new file mode 100644 index 000000000..df16c6891 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/client.py @@ -0,0 +1,3993 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_network_firewall_policies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionNetworkFirewallPoliciesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionNetworkFirewallPoliciesRestTransport + + +class RegionNetworkFirewallPoliciesClientMeta(type): + """Metaclass for the RegionNetworkFirewallPolicies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionNetworkFirewallPoliciesTransport]] + _transport_registry["rest"] = RegionNetworkFirewallPoliciesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionNetworkFirewallPoliciesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionNetworkFirewallPoliciesClient(metaclass=RegionNetworkFirewallPoliciesClientMeta): + """The RegionNetworkFirewallPolicies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionNetworkFirewallPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionNetworkFirewallPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionNetworkFirewallPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionNetworkFirewallPoliciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Both knobs are environment-driven; validate them eagerly so a typo
        # fails fast instead of silently falling back to a default.
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            # Explicit client_options cert source wins over the system default.
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, RegionNetworkFirewallPoliciesTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the region network firewall policies client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, RegionNetworkFirewallPoliciesTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
                NOTE: "rest" transport functionality is currently in a
                beta state (preview). We welcome your feedback via an
                issue in this library's source repository.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a plain dict for convenience.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        # An API key and explicit credentials are mutually exclusive ways of
        # authenticating; reject the ambiguous combination.
        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, RegionNetworkFirewallPoliciesTransport):
            # transport is a RegionNetworkFirewallPoliciesTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            # get_api_key_credentials is only available in newer google-auth
            # releases, hence the hasattr guard.
            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def add_association_unary(self,
            request: Optional[Union[compute.AddAssociationRegionNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            firewall_policy_association_resource: Optional[compute.FirewallPolicyAssociation] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Inserts an association for the specified network
        firewall policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_association():
                # Create a client
                client = compute_v1.RegionNetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.AddAssociationRegionNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.add_association(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddAssociationRegionNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                RegionNetworkFirewallPolicies.AddAssociation.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation):
                The body resource for this request
                This corresponds to the ``firewall_policy_association_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, firewall_policy, firewall_policy_association_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddAssociationRegionNetworkFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddAssociationRegionNetworkFirewallPolicyRequest):
            request = compute.AddAssociationRegionNetworkFirewallPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if firewall_policy is not None:
                request.firewall_policy = firewall_policy
            if firewall_policy_association_resource is not None:
                request.firewall_policy_association_resource = firewall_policy_association_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_association]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response. Unlike add_association(), this unary
        # variant returns the raw compute.Operation without wrapping it in
        # an ExtendedOperation.
        return response

    def add_association(self,
            request: Optional[Union[compute.AddAssociationRegionNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            firewall_policy_association_resource: Optional[compute.FirewallPolicyAssociation] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Inserts an association for the specified network
        firewall policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_association():
                # Create a client
                client = compute_v1.RegionNetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.AddAssociationRegionNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.add_association(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddAssociationRegionNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                RegionNetworkFirewallPolicies.AddAssociation.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation):
                The body resource for this request
                This corresponds to the ``firewall_policy_association_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, firewall_policy, firewall_policy_association_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddAssociationRegionNetworkFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddAssociationRegionNetworkFirewallPolicyRequest):
            request = compute.AddAssociationRegionNetworkFirewallPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if firewall_policy is not None:
                request.firewall_policy = firewall_policy
            if firewall_policy_association_resource is not None:
                request.firewall_policy_association_resource = firewall_policy_association_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_association]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw Operation in an ExtendedOperation that polls the
        # regional operations service until the operation completes.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            # Maps the Compute-specific http_error_* fields onto the generic
            # ExtendedOperation property names.
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def add_rule_unary(self,
            request: Optional[Union[compute.AddRuleRegionNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Inserts a rule into a network firewall policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_rule():
                # Create a client
                client = compute_v1.RegionNetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.AddRuleRegionNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.add_rule(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddRuleRegionNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                RegionNetworkFirewallPolicies.AddRule.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule):
                The body resource for this request
                This corresponds to the ``firewall_policy_rule_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, firewall_policy, firewall_policy_rule_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddRuleRegionNetworkFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddRuleRegionNetworkFirewallPolicyRequest):
            request = compute.AddRuleRegionNetworkFirewallPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if firewall_policy is not None:
                request.firewall_policy = firewall_policy
            if firewall_policy_rule_resource is not None:
                request.firewall_policy_rule_resource = firewall_policy_rule_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_rule]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response. Unlike add_rule(), this unary variant
        # returns the raw compute.Operation without wrapping it in an
        # ExtendedOperation.
        return response

    def add_rule(self,
            request: Optional[Union[compute.AddRuleRegionNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Inserts a rule into a network firewall policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_rule():
                # Create a client
                client = compute_v1.RegionNetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.AddRuleRegionNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.add_rule(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddRuleRegionNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                RegionNetworkFirewallPolicies.AddRule.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule):
                The body resource for this request
                This corresponds to the ``firewall_policy_rule_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, firewall_policy, firewall_policy_rule_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddRuleRegionNetworkFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddRuleRegionNetworkFirewallPolicyRequest):
            request = compute.AddRuleRegionNetworkFirewallPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if firewall_policy is not None:
                request.firewall_policy = firewall_policy
            if firewall_policy_rule_resource is not None:
                request.firewall_policy_rule_resource = firewall_policy_rule_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_rule]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw Operation in an ExtendedOperation that polls the
        # regional operations service until the operation completes.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def clone_rules_unary(self,
            request: Optional[Union[compute.CloneRulesRegionNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Copies rules to the specified network firewall
        policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_clone_rules():
                # Create a client
                client = compute_v1.RegionNetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.CloneRulesRegionNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.clone_rules(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.CloneRulesRegionNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                RegionNetworkFirewallPolicies.CloneRules.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            firewall_policy (str):
                Name of the firewall policy to
                update.

                This corresponds to the ``firewall_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, firewall_policy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.CloneRulesRegionNetworkFirewallPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.CloneRulesRegionNetworkFirewallPolicyRequest):
            request = compute.CloneRulesRegionNetworkFirewallPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if firewall_policy is not None:
                request.firewall_policy = firewall_policy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.clone_rules]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("firewall_policy", request.firewall_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def clone_rules(self,
            request: Optional[Union[compute.CloneRulesRegionNetworkFirewallPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            firewall_policy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Copies rules to the specified network firewall
        policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_clone_rules():
                # Create a client
                client = compute_v1.RegionNetworkFirewallPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.CloneRulesRegionNetworkFirewallPolicyRequest(
                    firewall_policy="firewall_policy_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.clone_rules(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.CloneRulesRegionNetworkFirewallPolicyRequest, dict]):
                The request object. A request message for
                RegionNetworkFirewallPolicies.CloneRules.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.CloneRulesRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.CloneRulesRegionNetworkFirewallPolicyRequest): + request = compute.CloneRulesRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.clone_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified network firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.Delete. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ firewall_policy (str): + Name of the firewall policy to + delete. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionNetworkFirewallPolicyRequest): + request = compute.DeleteRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified network firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.Delete. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + delete. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionNetworkFirewallPolicyRequest): + request = compute.DeleteRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicy: + r"""Returns the specified network firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.Get. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ firewall_policy (str): + Name of the firewall policy to get. + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicy: + Represents a Firewall Policy + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionNetworkFirewallPolicyRequest): + request = compute.GetRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_association(self, + request: Optional[Union[compute.GetAssociationRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicyAssociation: + r"""Gets an association with the specified name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_association(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetAssociationRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetAssociationRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.GetAssociation. 
+ See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to which + the queried association belongs. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicyAssociation: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetAssociationRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetAssociationRegionNetworkFirewallPolicyRequest): + request = compute.GetAssociationRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_effective_firewalls(self, + request: Optional[Union[compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + network: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse: + r"""Returns the effective firewalls on a given network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_effective_firewalls(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest( + network="network_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_effective_firewalls(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.GetEffectiveFirewalls. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network (str): + Network reference + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, network]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest): + request = compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if network is not None: + request.network = network + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_effective_firewalls] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionNetworkFirewallPolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.GetIamPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: 
BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyRegionNetworkFirewallPolicyRequest): + request = compute.GetIamPolicyRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_rule(self, + request: Optional[Union[compute.GetRuleRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.FirewallPolicyRule: + r"""Gets a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRuleRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.GetRule. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ firewall_policy (str): + Name of the firewall policy to which + the queried rule belongs. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.FirewallPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRuleRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRuleRegionNetworkFirewallPolicyRequest): + request = compute.GetRuleRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new network firewall policy in the + specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_insert():
+                # Create a client
+                client = compute_v1.RegionNetworkFirewallPoliciesClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.InsertRegionNetworkFirewallPolicyRequest(
+                    project="project_value",
+                    region="region_value",
+                )
+
+                # Make the request
+                response = client.insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.InsertRegionNetworkFirewallPolicyRequest, dict]):
+                The request object. A request message for
+                RegionNetworkFirewallPolicies.Insert.
+                See the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            region (str):
+                Name of the region scoping this
+                request.
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy):
+                The body resource for this request
+                This corresponds to the ``firewall_policy_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                An object representing a
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionNetworkFirewallPolicyRequest): + request = compute.InsertRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new network firewall policy in the + specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionNetworkFirewallPolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy):
+                The body resource for this request
+                This corresponds to the ``firewall_policy_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, firewall_policy_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.InsertRegionNetworkFirewallPolicyRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.InsertRegionNetworkFirewallPolicyRequest):
+            request = compute.InsertRegionNetworkFirewallPolicyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+        if firewall_policy_resource is not None:
+            request.firewall_policy_resource = firewall_policy_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.insert]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionNetworkFirewallPoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists all the network firewall policies that have + been configured for the specified project in the given + region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionNetworkFirewallPoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionNetworkFirewallPoliciesRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.List. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_network_firewall_policies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionNetworkFirewallPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionNetworkFirewallPoliciesRequest): + request = compute.ListRegionNetworkFirewallPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified network firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.Patch. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            firewall_policy (str):
+                Name of the firewall policy to
+                update.
+
+                This corresponds to the ``firewall_policy`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy):
+                The body resource for this request
+                This corresponds to the ``firewall_policy_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                An object representing a
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, firewall_policy, firewall_policy_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.PatchRegionNetworkFirewallPolicyRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.PatchRegionNetworkFirewallPolicyRequest):
+            request = compute.PatchRegionNetworkFirewallPolicyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_resource is not None: + request.firewall_policy_resource = firewall_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_resource: Optional[compute.FirewallPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified network firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.Patch. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + This corresponds to the ``firewall_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, firewall_policy, firewall_policy_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.PatchRegionNetworkFirewallPolicyRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.PatchRegionNetworkFirewallPolicyRequest):
+            request = compute.PatchRegionNetworkFirewallPolicyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+        if firewall_policy is not None:
+            request.firewall_policy = firewall_policy
+        if firewall_policy_resource is not None:
+            request.firewall_policy_resource = firewall_policy_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.patch]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("region", request.region),
+                ("firewall_policy", request.firewall_policy),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def patch_rule_unary(self, + request: Optional[Union[compute.PatchRuleRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRuleRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.PatchRule. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRuleRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRuleRegionNetworkFirewallPolicyRequest): + request = compute.PatchRuleRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_rule(self, + request: Optional[Union[compute.PatchRuleRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + firewall_policy_rule_resource: Optional[compute.FirewallPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRuleRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.PatchRule. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + This corresponds to the ``firewall_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy, firewall_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRuleRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRuleRegionNetworkFirewallPolicyRequest): + request = compute.PatchRuleRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + if firewall_policy_rule_resource is not None: + request.firewall_policy_rule_resource = firewall_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def remove_association_unary(self, + request: Optional[Union[compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes an association for the specified network + firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_association(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveAssociationRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveAssociationRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.RemoveAssociation. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveAssociationRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveAssociationRegionNetworkFirewallPolicyRequest): + request = compute.RemoveAssociationRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.remove_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_association(self, + request: Optional[Union[compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes an association for the specified network + firewall policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_association(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveAssociationRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveAssociationRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.RemoveAssociation. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveAssociationRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveAssociationRegionNetworkFirewallPolicyRequest): + request = compute.RemoveAssociationRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_association] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def remove_rule_unary(self, + request: Optional[Union[compute.RemoveRuleRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveRuleRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.RemoveRule. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveRuleRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveRuleRegionNetworkFirewallPolicyRequest): + request = compute.RemoveRuleRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def remove_rule(self, + request: Optional[Union[compute.RemoveRuleRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + firewall_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes a rule of the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveRuleRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.RemoveRule. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ firewall_policy (str): + Name of the firewall policy to + update. + + This corresponds to the ``firewall_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, firewall_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveRuleRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveRuleRegionNetworkFirewallPolicyRequest): + request = compute.RemoveRuleRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if firewall_policy is not None: + request.firewall_policy = firewall_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_rule] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("firewall_policy", request.firewall_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[compute.RegionSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionNetworkFirewallPolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.SetIamPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyRegionNetworkFirewallPolicyRequest): + request = compute.SetIamPolicyRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = region_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionNetworkFirewallPolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsRegionNetworkFirewallPolicyRequest, dict]): + The request object. A request message for + RegionNetworkFirewallPolicies.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest): + request = compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionNetworkFirewallPoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionNetworkFirewallPoliciesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/pagers.py new file mode 100644 index 000000000..e70eacf0e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.FirewallPolicyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.FirewallPolicyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.FirewallPolicyList], + request: compute.ListRegionNetworkFirewallPoliciesRequest, + response: compute.FirewallPolicyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionNetworkFirewallPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.FirewallPolicyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListRegionNetworkFirewallPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.FirewallPolicyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.FirewallPolicy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/__init__.py new file mode 100644 index 000000000..6e642f969 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionNetworkFirewallPoliciesTransport +from .rest import RegionNetworkFirewallPoliciesRestTransport +from .rest import RegionNetworkFirewallPoliciesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionNetworkFirewallPoliciesTransport]] +_transport_registry['rest'] = RegionNetworkFirewallPoliciesRestTransport + +__all__ = ( + 'RegionNetworkFirewallPoliciesTransport', + 'RegionNetworkFirewallPoliciesRestTransport', + 'RegionNetworkFirewallPoliciesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/base.py new file mode 100644 index 000000000..e51c6a6ad --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/base.py @@ -0,0 +1,387 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.compute_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.compute_v1.types import compute
+from google.cloud.compute_v1.services import region_operations
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class RegionNetworkFirewallPoliciesTransport(abc.ABC):
+    """Abstract transport class for RegionNetworkFirewallPolicies."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/compute',
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'compute.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_association: gapic_v1.method.wrap_method( + self.add_association, + default_timeout=None, + client_info=client_info, + ), + self.add_rule: gapic_v1.method.wrap_method( + self.add_rule, + default_timeout=None, + client_info=client_info, + ), + self.clone_rules: gapic_v1.method.wrap_method( + self.clone_rules, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_association: gapic_v1.method.wrap_method( + self.get_association, + default_timeout=None, + client_info=client_info, + ), + self.get_effective_firewalls: gapic_v1.method.wrap_method( + self.get_effective_firewalls, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.get_rule: gapic_v1.method.wrap_method( + self.get_rule, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.patch_rule: gapic_v1.method.wrap_method( + self.patch_rule, + default_timeout=None, + client_info=client_info, + ), + self.remove_association: gapic_v1.method.wrap_method( + self.remove_association, + default_timeout=None, + client_info=client_info, + ), + self.remove_rule: gapic_v1.method.wrap_method( + self.remove_rule, + 
default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def add_association(self) -> Callable[ + [compute.AddAssociationRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def add_rule(self) -> Callable[ + [compute.AddRuleRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def clone_rules(self) -> Callable[ + [compute.CloneRulesRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionNetworkFirewallPolicyRequest], + Union[ + compute.FirewallPolicy, + Awaitable[compute.FirewallPolicy] + ]]: + raise NotImplementedError() + + @property + def get_association(self) -> Callable[ + [compute.GetAssociationRegionNetworkFirewallPolicyRequest], + Union[ + compute.FirewallPolicyAssociation, + Awaitable[compute.FirewallPolicyAssociation] + ]]: + raise NotImplementedError() + + @property + def get_effective_firewalls(self) -> Callable[ + [compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest], + Union[ + 
compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse, + Awaitable[compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyRegionNetworkFirewallPolicyRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def get_rule(self) -> Callable[ + [compute.GetRuleRegionNetworkFirewallPolicyRequest], + Union[ + compute.FirewallPolicyRule, + Awaitable[compute.FirewallPolicyRule] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionNetworkFirewallPoliciesRequest], + Union[ + compute.FirewallPolicyList, + Awaitable[compute.FirewallPolicyList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def patch_rule(self) -> Callable[ + [compute.PatchRuleRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def remove_association(self) -> Callable[ + [compute.RemoveAssociationRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def remove_rule(self) -> Callable[ + [compute.RemoveRuleRegionNetworkFirewallPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyRegionNetworkFirewallPolicyRequest], + Union[ + compute.Policy, + 
Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionNetworkFirewallPoliciesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py new file mode 100644 index 000000000..469a60a90 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py @@ -0,0 +1,2341 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionNetworkFirewallPoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionNetworkFirewallPoliciesRestInterceptor: + """Interceptor for RegionNetworkFirewallPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionNetworkFirewallPoliciesRestTransport. + + .. 
code-block:: python + class MyCustomRegionNetworkFirewallPoliciesInterceptor(RegionNetworkFirewallPoliciesRestInterceptor): + def pre_add_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_add_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_clone_rules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_clone_rules(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_effective_firewalls(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_effective_firewalls(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_get_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): 
+ logging.log(f"Received response: {response}") + return response + + transport = RegionNetworkFirewallPoliciesRestTransport(interceptor=MyCustomRegionNetworkFirewallPoliciesInterceptor()) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + + + """ + def pre_add_association(self, request: compute.AddAssociationRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddAssociationRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_add_association(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_association + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_add_rule(self, request: compute.AddRuleRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddRuleRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_add_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_rule + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_clone_rules(self, request: compute.CloneRulesRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.CloneRulesRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for clone_rules + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_clone_rules(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for clone_rules + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. 
+ """ + return request, metadata + + def post_get(self, response: compute.FirewallPolicy) -> compute.FirewallPolicy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_get_association(self, request: compute.GetAssociationRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetAssociationRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_get_association(self, response: compute.FirewallPolicyAssociation) -> compute.FirewallPolicyAssociation: + """Post-rpc interceptor for get_association + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_get_effective_firewalls(self, request: compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_effective_firewalls + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. 
+ """ + return request, metadata + + def post_get_effective_firewalls(self, response: compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse) -> compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse: + """Post-rpc interceptor for get_effective_firewalls + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_get_rule(self, request: compute.GetRuleRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRuleRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_get_rule(self, response: compute.FirewallPolicyRule) -> compute.FirewallPolicyRule: + """Post-rpc interceptor for get_rule + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionNetworkFirewallPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionNetworkFirewallPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_list(self, response: compute.FirewallPolicyList) -> compute.FirewallPolicyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. 
+ """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_patch_rule(self, request: compute.PatchRuleRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRuleRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_patch_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch_rule + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_remove_association(self, request: compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_association + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_remove_association(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_association + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_remove_rule(self, request: compute.RemoveRuleRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveRuleRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_remove_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_rule + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNetworkFirewallPolicies server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the RegionNetworkFirewallPolicies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionNetworkFirewallPoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionNetworkFirewallPoliciesRestInterceptor + + +class RegionNetworkFirewallPoliciesRestTransport(RegionNetworkFirewallPoliciesTransport): + """REST backend transport for RegionNetworkFirewallPolicies. + + The RegionNetworkFirewallPolicies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionNetworkFirewallPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionNetworkFirewallPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddAssociation(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("AddAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddAssociationRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add association method over HTTP. + + Args: + request (~.compute.AddAssociationRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.AddAssociation. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. For more
+                    information, read Global, Regional, and Zonal Resources.
+
+            """
+
+            # NOTE(review): the reference-link targets in the docstring above
+            # were lost to markup stripping and have been reconstructed from
+            # the Compute Engine REST reference paths — verify against fresh
+            # generator output.
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/addAssociation',
+                'body': 'firewall_policy_association_resource',
+            },
+            ]
+            request, metadata = self._interceptor.pre_add_association(request, metadata)
+            pb_request = compute.AddAssociationRegionNetworkFirewallPolicyRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                including_default_value_fields=False,
+                use_integers_for_enums=False
+            )
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+
+            # Jsonify the query params
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                including_default_value_fields=False,
+                use_integers_for_enums=False,
+            ))
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            # Send the request
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_association(resp) + return resp + + class _AddRule(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("AddRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddRuleRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add rule method over HTTP. + + Args: + request (~.compute.AddRuleRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.AddRule. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. 
+ - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/addRule', + 'body': 'firewall_policy_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_add_rule(request, metadata) + pb_request = compute.AddRuleRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_rule(resp) + return resp + + class _CloneRules(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("CloneRules") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.CloneRulesRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the clone rules method over HTTP. + + Args: + request (~.compute.CloneRulesRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.CloneRules. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/cloneRules', + }, + ] + request, metadata = self._interceptor.pre_clone_rules(request, metadata) + pb_request = compute.CloneRulesRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_clone_rules(resp) + return resp + + class _Delete(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.Delete. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.Get. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.FirewallPolicy: + Represents a Firewall Policy + resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicy() + pb_resp = compute.FirewallPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetAssociation(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("GetAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetAssociationRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicyAssociation: + r"""Call the get association method over HTTP. + + Args: + request (~.compute.GetAssociationRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.GetAssociation. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.FirewallPolicyAssociation: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/getAssociation', + }, + ] + request, metadata = self._interceptor.pre_get_association(request, metadata) + pb_request = compute.GetAssociationRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicyAssociation() + pb_resp = compute.FirewallPolicyAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_association(resp) + return resp + + class _GetEffectiveFirewalls(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("GetEffectiveFirewalls") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "network" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse: + r"""Call the get effective firewalls method over HTTP. + + Args: + request (~.compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.GetEffectiveFirewalls. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/getEffectiveFirewalls', + }, + ] + request, metadata = self._interceptor.pre_get_effective_firewalls(request, metadata) + pb_request = compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse() + pb_resp = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_effective_firewalls(resp) + return resp + + class _GetIamPolicy(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.GetIamPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). 
A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _GetRule(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("GetRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRuleRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicyRule: + r"""Call the get rule method over HTTP. + + Args: + request (~.compute.GetRuleRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.GetRule. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.FirewallPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/getRule', + }, + ] + request, metadata = self._interceptor.pre_get_rule(request, metadata) + pb_request = compute.GetRuleRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicyRule() + pb_resp = compute.FirewallPolicyRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_rule(resp) + return resp + + class _Insert(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.Insert. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies', + 'body': 'firewall_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionNetworkFirewallPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.FirewallPolicyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionNetworkFirewallPoliciesRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.FirewallPolicyList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionNetworkFirewallPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.FirewallPolicyList() + pb_resp = compute.FirewallPolicyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.Patch. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}', + 'body': 'firewall_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _PatchRule(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("PatchRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRuleRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch rule method over HTTP. + + Args: + request (~.compute.PatchRuleRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.PatchRule. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/patchRule', + 'body': 'firewall_policy_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_patch_rule(request, metadata) + pb_request = compute.PatchRuleRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch_rule(resp) + return resp + + class _RemoveAssociation(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("RemoveAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove association method over HTTP. + + Args: + request (~.compute.RemoveAssociationRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.RemoveAssociation. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/removeAssociation', + }, + ] + request, metadata = self._interceptor.pre_remove_association(request, metadata) + pb_request = compute.RemoveAssociationRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_association(resp) + return resp + + class _RemoveRule(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("RemoveRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveRuleRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove rule method over HTTP. + + Args: + request (~.compute.RemoveRuleRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.RemoveRule. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/removeRule', + }, + ] + request, metadata = self._interceptor.pre_remove_rule(request, metadata) + pb_request = compute.RemoveRuleRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_rule(resp) + return resp + + class _SetIamPolicy(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.SetIamPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{resource}/setIamPolicy', + 'body': 'region_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(RegionNetworkFirewallPoliciesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest): + The request object. A request message for + RegionNetworkFirewallPolicies.TestIamPermissions. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def add_association(self) -> Callable[ + [compute.AddAssociationRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def add_rule(self) -> Callable[ + [compute.AddRuleRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def clone_rules(self) -> Callable[ + [compute.CloneRulesRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CloneRules(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionNetworkFirewallPolicyRequest], + compute.FirewallPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_association(self) -> Callable[ + [compute.GetAssociationRegionNetworkFirewallPolicyRequest], + compute.FirewallPolicyAssociation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_effective_firewalls(self) -> Callable[ + [compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest], + compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEffectiveFirewalls(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyRegionNetworkFirewallPolicyRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_rule(self) -> Callable[ + [compute.GetRuleRegionNetworkFirewallPolicyRequest], + compute.FirewallPolicyRule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionNetworkFirewallPoliciesRequest], + compute.FirewallPolicyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch_rule(self) -> Callable[ + [compute.PatchRuleRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PatchRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_association(self) -> Callable[ + [compute.RemoveAssociationRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RemoveAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_rule(self) -> Callable[ + [compute.RemoveRuleRegionNetworkFirewallPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyRegionNetworkFirewallPolicyRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionNetworkFirewallPoliciesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/__init__.py new file mode 100644 index 000000000..1e587358e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionNotificationEndpointsClient + +__all__ = ( + 'RegionNotificationEndpointsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/client.py new file mode 100644 index 000000000..e8de8219f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/client.py @@ -0,0 +1,1191 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_notification_endpoints import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionNotificationEndpointsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionNotificationEndpointsRestTransport + + +class RegionNotificationEndpointsClientMeta(type): + """Metaclass for the RegionNotificationEndpoints client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionNotificationEndpointsTransport]] + _transport_registry["rest"] = RegionNotificationEndpointsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionNotificationEndpointsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionNotificationEndpointsClient(metaclass=RegionNotificationEndpointsClientMeta): + """The RegionNotificationEndpoints API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionNotificationEndpointsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionNotificationEndpointsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionNotificationEndpointsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionNotificationEndpointsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionNotificationEndpointsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region notification endpoints client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionNotificationEndpointsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionNotificationEndpointsTransport): + # transport is a RegionNotificationEndpointsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionNotificationEndpointRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + notification_endpoint: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified NotificationEndpoint in the + given region + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionNotificationEndpointsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNotificationEndpointRequest( + notification_endpoint="notification_endpoint_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest, dict]): + The request object. A request message for + RegionNotificationEndpoints.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + notification_endpoint (str): + Name of the NotificationEndpoint + resource to delete. + + This corresponds to the ``notification_endpoint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, notification_endpoint]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionNotificationEndpointRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionNotificationEndpointRequest): + request = compute.DeleteRegionNotificationEndpointRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if notification_endpoint is not None: + request.notification_endpoint = notification_endpoint + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("notification_endpoint", request.notification_endpoint), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def delete(self,
+            request: Optional[Union[compute.DeleteRegionNotificationEndpointRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            region: Optional[str] = None,
+            notification_endpoint: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> extended_operation.ExtendedOperation:
+        r"""Deletes the specified NotificationEndpoint in the
+        given region.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_delete():
+                # Create a client
+                client = compute_v1.RegionNotificationEndpointsClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.DeleteRegionNotificationEndpointRequest(
+                    notification_endpoint="notification_endpoint_value",
+                    project="project_value",
+                    region="region_value",
+                )
+
+                # Make the request
+                response = client.delete(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest, dict]):
+                The request object. A request message for
+                RegionNotificationEndpoints.Delete. See
+                the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            region (str):
+                Name of the region scoping this
+                request.
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            notification_endpoint (str):
+                Name of the NotificationEndpoint
+                resource to delete.
+
+                This corresponds to the ``notification_endpoint`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, notification_endpoint])  # NOTE(review): falsy values (e.g. "") read as "not set" here — generator convention, confirm upstream
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.DeleteRegionNotificationEndpointRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.DeleteRegionNotificationEndpointRequest):
+            request = compute.DeleteRegionNotificationEndpointRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+        if notification_endpoint is not None:
+            request.notification_endpoint = notification_endpoint
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here (routing headers derived from the request fields).
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("region", request.region),
+                ("notification_endpoint", request.notification_endpoint),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        operation_service = self._transport._region_operations_client
+        operation_request = compute.GetRegionOperationRequest()
+        operation_request.project = request.project
+        operation_request.region = request.region
+        operation_request.operation = response.name
+
+        get_operation = functools.partial(operation_service.get, operation_request)
+        # Cancel is not part of extended operations yet, so expose a no-op.
+        cancel_operation = lambda: None
+
+        # Note: this class is an implementation detail to provide a uniform
+        # set of names for certain fields in the extended operation proto message.
+        # See google.api_core.extended_operation.ExtendedOperation for details
+        # on these properties and the expected interface.
+        class _CustomOperation(extended_operation.ExtendedOperation):
+            @property
+            def error_message(self):
+                return self._extended_operation.http_error_message
+
+            @property
+            def error_code(self):
+                return self._extended_operation.http_error_status_code
+
+        response = _CustomOperation.make(get_operation, cancel_operation, response)
+
+        # Done; return the response.
+        return response
+
+    def get(self,
+            request: Optional[Union[compute.GetRegionNotificationEndpointRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            region: Optional[str] = None,
+            notification_endpoint: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> compute.NotificationEndpoint:
+        r"""Returns the specified NotificationEndpoint resource
+        in the given region.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_get():
+                # Create a client
+                client = compute_v1.RegionNotificationEndpointsClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.GetRegionNotificationEndpointRequest(
+                    notification_endpoint="notification_endpoint_value",
+                    project="project_value",
+                    region="region_value",
+                )
+
+                # Make the request
+                response = client.get(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.GetRegionNotificationEndpointRequest, dict]):
+                The request object. A request message for
+                RegionNotificationEndpoints.Get. See the
+                method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            region (str):
+                Name of the region scoping this
+                request.
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            notification_endpoint (str):
+                Name of the NotificationEndpoint
+                resource to return.
+
+                This corresponds to the ``notification_endpoint`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.types.NotificationEndpoint:
+                Represents a notification endpoint. A
+                notification endpoint resource defines
+                an endpoint to receive notifications
+                when there are status changes detected
+                by the associated health check service.
+                For more information, see Health checks
+                overview.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, notification_endpoint])  # NOTE(review): falsy values (e.g. "") read as "not set" — generator convention
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.GetRegionNotificationEndpointRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.GetRegionNotificationEndpointRequest):
+            request = compute.GetRegionNotificationEndpointRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+        if notification_endpoint is not None:
+            request.notification_endpoint = notification_endpoint
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here (routing headers derived from the request fields).
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("region", request.region),
+                ("notification_endpoint", request.notification_endpoint),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def insert_unary(self,
+            request: Optional[Union[compute.InsertRegionNotificationEndpointRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            region: Optional[str] = None,
+            notification_endpoint_resource: Optional[compute.NotificationEndpoint] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> compute.Operation:
+        r"""Create a NotificationEndpoint in the specified
+        project in the given region using the parameters that
+        are included in the request.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_insert():
+                # Create a client
+                client = compute_v1.RegionNotificationEndpointsClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.InsertRegionNotificationEndpointRequest(
+                    project="project_value",
+                    region="region_value",
+                )
+
+                # Make the request
+                response = client.insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest, dict]):
+                The request object. A request message for
+                RegionNotificationEndpoints.Insert. See
+                the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            region (str):
+                Name of the region scoping this
+                request.
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            notification_endpoint_resource (google.cloud.compute_v1.types.NotificationEndpoint):
+                The body resource for this request
+                This corresponds to the ``notification_endpoint_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, notification_endpoint_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.InsertRegionNotificationEndpointRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.InsertRegionNotificationEndpointRequest):
+            request = compute.InsertRegionNotificationEndpointRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+        if notification_endpoint_resource is not None:
+            request.notification_endpoint_resource = notification_endpoint_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.insert]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here (body resource is not a routing parameter).
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("region", request.region),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response (annotated return is compute.Operation; the
+        # docstring's ExtendedOperation wording is a known generator quirk).
+        return response
+
+    def insert(self,
+            request: Optional[Union[compute.InsertRegionNotificationEndpointRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            region: Optional[str] = None,
+            notification_endpoint_resource: Optional[compute.NotificationEndpoint] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> extended_operation.ExtendedOperation:
+        r"""Create a NotificationEndpoint in the specified
+        project in the given region using the parameters that
+        are included in the request.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_insert():
+                # Create a client
+                client = compute_v1.RegionNotificationEndpointsClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.InsertRegionNotificationEndpointRequest(
+                    project="project_value",
+                    region="region_value",
+                )
+
+                # Make the request
+                response = client.insert(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest, dict]):
+                The request object. A request message for
+                RegionNotificationEndpoints.Insert. See
+                the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            region (str):
+                Name of the region scoping this
+                request.
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            notification_endpoint_resource (google.cloud.compute_v1.types.NotificationEndpoint):
+                The body resource for this request
+                This corresponds to the ``notification_endpoint_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, notification_endpoint_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.InsertRegionNotificationEndpointRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.InsertRegionNotificationEndpointRequest):
+            request = compute.InsertRegionNotificationEndpointRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+        if notification_endpoint_resource is not None:
+            request.notification_endpoint_resource = notification_endpoint_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.insert]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here (body resource is not a routing parameter).
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("region", request.region),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        operation_service = self._transport._region_operations_client
+        operation_request = compute.GetRegionOperationRequest()
+        operation_request.project = request.project
+        operation_request.region = request.region
+        operation_request.operation = response.name
+
+        get_operation = functools.partial(operation_service.get, operation_request)
+        # Cancel is not part of extended operations yet, so expose a no-op.
+        cancel_operation = lambda: None
+
+        # Note: this class is an implementation detail to provide a uniform
+        # set of names for certain fields in the extended operation proto message.
+        # See google.api_core.extended_operation.ExtendedOperation for details
+        # on these properties and the expected interface.
+        class _CustomOperation(extended_operation.ExtendedOperation):
+            @property
+            def error_message(self):
+                return self._extended_operation.http_error_message
+
+            @property
+            def error_code(self):
+                return self._extended_operation.http_error_status_code
+
+        response = _CustomOperation.make(get_operation, cancel_operation, response)
+
+        # Done; return the response.
+        return response
+
+    def list(self,
+            request: Optional[Union[compute.ListRegionNotificationEndpointsRequest, dict]] = None,
+            *,
+            project: Optional[str] = None,
+            region: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListPager:
+        r"""Lists the NotificationEndpoints for a project in the
+        given region.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_list():
+                # Create a client
+                client = compute_v1.RegionNotificationEndpointsClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.ListRegionNotificationEndpointsRequest(
+                    project="project_value",
+                    region="region_value",
+                )
+
+                # Make the request
+                page_result = client.list(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest, dict]):
+                The request object. A request message for
+                RegionNotificationEndpoints.List. See
+                the method description for details.
+            project (str):
+                Project ID for this request.
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            region (str):
+                Name of the region scoping this
+                request.
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.compute_v1.services.region_notification_endpoints.pagers.ListPager:
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.ListRegionNotificationEndpointsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.ListRegionNotificationEndpointsRequest):
+            request = compute.ListRegionNotificationEndpointsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here (routing headers derived from the request fields).
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("region", request.region),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method (subsequent pages fetched lazily).
+        response = pagers.ListPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "RegionNotificationEndpointsClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+        """
+        self.transport.close()
+
+
+
+
+
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "RegionNotificationEndpointsClient",
+)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/pagers.py
new file mode 100644
index 000000000..5a66e5f0b
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/pagers.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator
+
+from google.cloud.compute_v1.types import compute
+
+
+class ListPager:
+    """A pager for iterating through ``list`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.compute_v1.types.NotificationEndpointList` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``items`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``List`` requests and continue to iterate
+    through the ``items`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.compute_v1.types.NotificationEndpointList`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., compute.NotificationEndpointList],
+            request: compute.ListRegionNotificationEndpointsRequest,
+            response: compute.NotificationEndpointList,
+            *,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest):
+                The initial request object.
+            response (google.cloud.compute_v1.types.NotificationEndpointList):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = compute.ListRegionNotificationEndpointsRequest(request)  # copy: page_token is mutated across pages
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:  # delegate unknown attributes to the most recent response
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[compute.NotificationEndpointList]:
+        yield self._response
+        while self._response.next_page_token:  # keep fetching until the server stops returning a token
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[compute.NotificationEndpoint]:
+        for page in self.pages:
+            yield from page.items
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/__init__.py
new file mode 100644
index 000000000..913f9de5b
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/__init__.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionNotificationEndpointsTransport +from .rest import RegionNotificationEndpointsRestTransport +from .rest import RegionNotificationEndpointsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionNotificationEndpointsTransport]] +_transport_registry['rest'] = RegionNotificationEndpointsRestTransport + +__all__ = ( + 'RegionNotificationEndpointsTransport', + 'RegionNotificationEndpointsRestTransport', + 'RegionNotificationEndpointsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/base.py new file mode 100644 index 000000000..6386166de --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/base.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.compute_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.compute_v1.types import compute
+from google.cloud.compute_v1.services import region_operations
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class RegionNotificationEndpointsTransport(abc.ABC):
+    """Abstract transport class for RegionNotificationEndpoints."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/compute',
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'compute.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+        self._extended_operations_services: Dict[str, Any] = {}  # NOTE(review): 'Any' is not in this file's typing imports — local annotations are unevaluated, but mypy will flag it; confirm upstream generator
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply audience if the credentials file passed from user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'  # default HTTPS port
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods (adds retry/timeout/error handling).
+        self._wrapped_methods = {
+            self.delete: gapic_v1.method.wrap_method(
+                self.delete,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.get: gapic_v1.method.wrap_method(
+                self.get,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.insert: gapic_v1.method.wrap_method(
+                self.insert,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.list: gapic_v1.method.wrap_method(
+                self.list,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+             Only call this method if the transport is NOT shared
+             with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    @property
+    def delete(self) -> Callable[
+            [compute.DeleteRegionNotificationEndpointRequest],
+            Union[
+                compute.Operation,
+                Awaitable[compute.Operation]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get(self) -> Callable[
+            [compute.GetRegionNotificationEndpointRequest],
+            Union[
+                compute.NotificationEndpoint,
+                Awaitable[compute.NotificationEndpoint]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def insert(self) -> Callable[
+            [compute.InsertRegionNotificationEndpointRequest],
+            Union[
+                compute.Operation,
+                Awaitable[compute.Operation]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list(self) -> Callable[
+            [compute.ListRegionNotificationEndpointsRequest],
+            Union[
+                compute.NotificationEndpointList,
+                Awaitable[compute.NotificationEndpointList]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def _region_operations_client(self) -> region_operations.RegionOperationsClient:
+        # Lazily construct and cache the operations client used to poll LROs.
+        ex_op_service = self._extended_operations_services.get("region_operations")
+        if not ex_op_service:
+            ex_op_service = region_operations.RegionOperationsClient(
+                credentials=self._credentials,
+                transport=self.kind,
+            )
+            self._extended_operations_services["region_operations"] = ex_op_service
+
+        return ex_op_service
+
+
+__all__ = (
+    'RegionNotificationEndpointsTransport',
+)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py
new file mode 100644
index 000000000..855dba21b
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py
@@ -0,0 +1,671 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionNotificationEndpointsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionNotificationEndpointsRestInterceptor: + """Interceptor for RegionNotificationEndpoints. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionNotificationEndpointsRestTransport. + + .. 
code-block:: python + class MyCustomRegionNotificationEndpointsInterceptor(RegionNotificationEndpointsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionNotificationEndpointsRestTransport(interceptor=MyCustomRegionNotificationEndpointsInterceptor()) + client = RegionNotificationEndpointsClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionNotificationEndpointRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionNotificationEndpointRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNotificationEndpoints server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionNotificationEndpoints server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetRegionNotificationEndpointRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionNotificationEndpointRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNotificationEndpoints server. + """ + return request, metadata + + def post_get(self, response: compute.NotificationEndpoint) -> compute.NotificationEndpoint: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionNotificationEndpoints server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionNotificationEndpointRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionNotificationEndpointRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNotificationEndpoints server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionNotificationEndpoints server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionNotificationEndpointsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionNotificationEndpointsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionNotificationEndpoints server. 
+ """ + return request, metadata + + def post_list(self, response: compute.NotificationEndpointList) -> compute.NotificationEndpointList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionNotificationEndpoints server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionNotificationEndpointsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionNotificationEndpointsRestInterceptor + + +class RegionNotificationEndpointsRestTransport(RegionNotificationEndpointsTransport): + """REST backend transport for RegionNotificationEndpoints. + + The RegionNotificationEndpoints API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionNotificationEndpointsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionNotificationEndpointsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionNotificationEndpointsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionNotificationEndpointRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionNotificationEndpointRequest): + The request object. A request message for + RegionNotificationEndpoints.Delete. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionNotificationEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionNotificationEndpointsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionNotificationEndpointRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NotificationEndpoint: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionNotificationEndpointRequest): + The request object. A request message for + RegionNotificationEndpoints.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.NotificationEndpoint: + Represents a notification endpoint. A + notification endpoint resource defines + an endpoint to receive notifications + when there are status changes detected + by the associated health check service. + For more information, see Health checks + overview. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionNotificationEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NotificationEndpoint() + pb_resp = compute.NotificationEndpoint.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionNotificationEndpointsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionNotificationEndpointRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionNotificationEndpointRequest): + The request object. A request message for + RegionNotificationEndpoints.Insert. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/notificationEndpoints', + 'body': 'notification_endpoint_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionNotificationEndpointRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionNotificationEndpointsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionNotificationEndpointsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.NotificationEndpointList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionNotificationEndpointsRequest): + The request object. A request message for + RegionNotificationEndpoints.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.NotificationEndpointList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/notificationEndpoints', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionNotificationEndpointsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NotificationEndpointList() + pb_resp = compute.NotificationEndpointList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionNotificationEndpointRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionNotificationEndpointRequest], + compute.NotificationEndpoint]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionNotificationEndpointRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionNotificationEndpointsRequest], + compute.NotificationEndpointList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionNotificationEndpointsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/__init__.py new file mode 100644 index 000000000..7353f4da8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionOperationsClient + +__all__ = ( + 'RegionOperationsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/client.py new file mode 100644 index 000000000..2181142bc --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/client.py @@ -0,0 +1,921 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# -*- coding: utf-8 -*-
#
from collections import OrderedDict
import os
import re
from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast

from google.cloud.compute_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

try:
    # Newer google-api-core exposes a sentinel default for per-call retries.
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.cloud.compute_v1.services.region_operations import pagers
from google.cloud.compute_v1.types import compute
from .transports.base import RegionOperationsTransport, DEFAULT_CLIENT_INFO
from .transports.rest import RegionOperationsRestTransport


class RegionOperationsClientMeta(type):
    """Metaclass for the RegionOperations client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client
    instance objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[RegionOperationsTransport]]
    _transport_registry["rest"] = RegionOperationsRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[RegionOperationsTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first
        # one in the dictionary).
        return next(iter(cls._transport_registry.values()))


class RegionOperationsClient(metaclass=RegionOperationsClientMeta):
    """The RegionOperations API."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com"
        respectively.

        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # BUGFIX: the named groups had been stripped from this pattern
        # (e.g. "(?P[^.]+)"), which is invalid regex syntax and would make
        # re.compile raise. Restored the canonical named groups.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        # Local renamed from `mtls` to avoid shadowing the imported module.
        name, mtls_part, sandbox, googledomain = m.groups()
        if mtls_part or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "compute.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            RegionOperationsClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            RegionOperationsClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    from_service_account_json = from_service_account_file

    @property
    def transport(self) -> RegionOperationsTransport:
        """Returns the transport used by the client instance.

        Returns:
            RegionOperationsTransport: The transport used by the client
                instance.
        """
        return self._transport

    # NOTE: the named groups in the parse_common_*_path regexes below had
    # been stripped (e.g. "(?P.+?)"), which is invalid regex syntax; they
    # are restored here to match the format strings above them.

    @staticmethod
    def common_billing_account_path(billing_account: str, ) -> str:
        """Returns a fully-qualified billing_account string."""
        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )

    @staticmethod
    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
        """Parse a billing_account path into its component segments."""
        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_folder_path(folder: str, ) -> str:
        """Returns a fully-qualified folder string."""
        return "folders/{folder}".format(folder=folder, )

    @staticmethod
    def parse_common_folder_path(path: str) -> Dict[str, str]:
        """Parse a folder path into its component segments."""
        m = re.match(r"^folders/(?P<folder>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_organization_path(organization: str, ) -> str:
        """Returns a fully-qualified organization string."""
        return "organizations/{organization}".format(organization=organization, )

    @staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parse a organization path into its component segments."""
        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_project_path(project: str, ) -> str:
        """Returns a fully-qualified project string."""
        return "projects/{project}".format(project=project, )

    @staticmethod
    def parse_common_project_path(path: str) -> Dict[str, str]:
        """Parse a project path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_location_path(project: str, location: str, ) -> str:
        """Returns a fully-qualified location string."""
        return "projects/{project}/locations/{location}".format(project=project, location=location, )

    @staticmethod
    def parse_common_location_path(path: str) -> Dict[str, str]:
        """Parse a location path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}

    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not
        "true", the client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the
        provided one; if the default client cert source exists, use the
        default one; otherwise the client cert source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is
        "always", use the default mTLS endpoint; if the environment variable
        is "never", use the default API endpoint; otherwise if client cert
        source exists, use the default mTLS endpoint, otherwise use the
        default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions):
                Custom options for the client. Only the `api_endpoint` and
                `client_cert_source` properties may be used in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API
                endpoint and the client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors
                happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, RegionOperationsTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the region operations client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, RegionOperationsTransport]): The transport
                to use. If set to None, a transport is chosen automatically.
                NOTE: "rest" transport functionality is currently in a beta
                state (preview).
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client. It won't take effect if a
                ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client.
                GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be
                used to override the endpoint: "always" (always use the
                default mTLS endpoint), "never" (always use the default
                regular endpoint) and "auto" (auto switch to the default mTLS
                endpoint if client certificate is present, this is the
                default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be
                used to provide client certificate for mutual TLS transport.
                If not provided, the default SSL client certificate will be
                used if present.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS
                transport creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom
        # transport instance provides an extensibility point for unusual
        # situations.
        if isinstance(transport, RegionOperationsTransport):
            # transport is a RegionOperationsTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def delete(self,
            request: Optional[Union[compute.DeleteRegionOperationRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            operation: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.DeleteRegionOperationResponse:
        r"""Deletes the specified region-specific Operations resource.

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteRegionOperationRequest, dict]):
                The request object. A request message for
                RegionOperations.Delete.
            project (str):
                Project ID for this request. Corresponds to the ``project``
                field on ``request``; must not be set if ``request`` is
                provided.
            region (str):
                Name of the region for this request. Corresponds to the
                ``region`` field on ``request``; must not be set if
                ``request`` is provided.
            operation (str):
                Name of the Operations resource to delete. Corresponds to
                the ``operation`` field on ``request``; must not be set if
                ``request`` is provided.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.DeleteRegionOperationResponse:
                A response message for RegionOperations.Delete.

        Raises:
            ValueError: If ``request`` is set together with any flattened
                field argument.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, operation])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes in a
        # compute.DeleteRegionOperationRequest. There's no risk of modifying
        # the input as we've already verified there are no flattened fields.
        if not isinstance(request, compute.DeleteRegionOperationRequest):
            request = compute.DeleteRegionOperationRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if operation is not None:
                request.operation = operation

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("operation", request.operation),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def get(self,
            request: Optional[Union[compute.GetRegionOperationRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            operation: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Retrieves the specified region-specific Operations resource.

        Args:
            request (Union[google.cloud.compute_v1.types.GetRegionOperationRequest, dict]):
                The request object. A request message for
                RegionOperations.Get.
            project (str):
                Project ID for this request. Corresponds to the ``project``
                field on ``request``; must not be set if ``request`` is
                provided.
            region (str):
                Name of the region for this request. Corresponds to the
                ``region`` field on ``request``; must not be set if
                ``request`` is provided.
            operation (str):
                Name of the Operations resource to return. Corresponds to
                the ``operation`` field on ``request``; must not be set if
                ``request`` is provided.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                Represents an Operation resource. Operations can be global,
                regional or zonal; use an operation resource to manage
                asynchronous API requests.

        Raises:
            ValueError: If ``request`` is set together with any flattened
                field argument.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, operation])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes in a
        # compute.GetRegionOperationRequest. There's no risk of modifying
        # the input as we've already verified there are no flattened fields.
        if not isinstance(request, compute.GetRegionOperationRequest):
            request = compute.GetRegionOperationRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if operation is not None:
                request.operation = operation

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("operation", request.operation),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def list(self,
            request: Optional[Union[compute.ListRegionOperationsRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListPager:
        r"""Retrieves a list of Operation resources contained within the
        specified region.

        Args:
            request (Union[google.cloud.compute_v1.types.ListRegionOperationsRequest, dict]):
                The request object. A request message for
                RegionOperations.List.
            project (str):
                Project ID for this request. Corresponds to the ``project``
                field on ``request``; must not be set if ``request`` is
                provided.
            region (str):
                Name of the region for this request. Corresponds to the
                ``region`` field on ``request``; must not be set if
                ``request`` is provided.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.services.region_operations.pagers.ListPager:
                Contains a list of Operation resources. Iterating over this
                object will yield results and resolve additional pages
                automatically.

        Raises:
            ValueError: If ``request`` is set together with any flattened
                field argument.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes in a
        # compute.ListRegionOperationsRequest. There's no risk of modifying
        # the input as we've already verified there are no flattened fields.
        if not isinstance(request, compute.ListRegionOperationsRequest):
            request = compute.ListRegionOperationsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def wait(self,
            request: Optional[Union[compute.WaitRegionOperationRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            operation: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Waits for the specified Operation resource to return as ``DONE``
        or for the request to approach the 2 minute deadline, and retrieves
        the specified Operation resource. This method differs from the
        ``GET`` method in that it waits for no more than the default
        deadline (2 minutes) and then returns the current state of the
        operation, which might be ``DONE`` or still in progress. This
        method is called on a best-effort basis; be prepared to retry if
        the operation is not ``DONE``.

        Args:
            request (Union[google.cloud.compute_v1.types.WaitRegionOperationRequest, dict]):
                The request object. A request message for
                RegionOperations.Wait.
            project (str):
                Project ID for this request. Corresponds to the ``project``
                field on ``request``; must not be set if ``request`` is
                provided.
            region (str):
                Name of the region for this request. Corresponds to the
                ``region`` field on ``request``; must not be set if
                ``request`` is provided.
            operation (str):
                Name of the Operations resource to return. Corresponds to
                the ``operation`` field on ``request``; must not be set if
                ``request`` is provided.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                Represents an Operation resource. Operations can be global,
                regional or zonal; use an operation resource to manage
                asynchronous API requests.

        Raises:
            ValueError: If ``request`` is set together with any flattened
                field argument.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, operation])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes in a
        # compute.WaitRegionOperationRequest. There's no risk of modifying
        # the input as we've already verified there are no flattened fields.
        if not isinstance(request, compute.WaitRegionOperationRequest):
            request = compute.WaitRegionOperationRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if operation is not None:
                request.operation = operation

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.wait]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("operation", request.operation),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def __enter__(self) -> "RegionOperationsClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the
            transport and may cause errors in other clients!
        """
        self.transport.close()


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


__all__ = (
    "RegionOperationsClient",
)

# ---------------------------------------------------------------------------
# owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/pagers.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
#
from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator

from google.cloud.compute_v1.types import compute


class ListPager:
    """A pager for iterating through ``list`` requests.

    Thinly wraps an initial
    :class:`google.cloud.compute_v1.types.OperationList` object and exposes
    an ``__iter__`` method that walks its ``items`` field, transparently
    issuing further ``List`` requests whenever a ``next_page_token`` is
    present on the current response.

    All the usual :class:`google.cloud.compute_v1.types.OperationList`
    attributes are available on the pager. If multiple requests are made,
    only the most recent response is retained, and thus used for attribute
    lookup.
    """

    def __init__(self,
            method: Callable[..., compute.OperationList],
            request: compute.ListRegionOperationsRequest,
            response: compute.OperationList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListRegionOperationsRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.OperationList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so that mutating page_token below never touches
        # the caller's object.
        self._request = compute.ListRegionOperationsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Any attribute not found on the pager is looked up on the most
        # recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.OperationList]:
        """Lazily yield each page, fetching the next one on demand."""
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[compute.Operation]:
        # Flatten the pages into a single stream of Operation items.
        return (item for page in self.pages for item in page.items)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


# ---------------------------------------------------------------------------
# owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/transports/__init__.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import RegionOperationsTransport
from .rest import RegionOperationsRestTransport
from .rest import RegionOperationsRestInterceptor


# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionOperationsTransport]] +_transport_registry['rest'] = RegionOperationsRestTransport + +__all__ = ( + 'RegionOperationsTransport', + 'RegionOperationsRestTransport', + 'RegionOperationsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/transports/base.py new file mode 100644 index 000000000..c4d22a44c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/transports/base.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionOperationsTransport(abc.ABC): + """Abstract transport class for RegionOperations.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.wait: gapic_v1.method.wrap_method( + self.wait, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionOperationRequest], + Union[ + compute.DeleteRegionOperationResponse, + Awaitable[compute.DeleteRegionOperationResponse] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionOperationRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionOperationsRequest], + Union[ + compute.OperationList, + Awaitable[compute.OperationList] + ]]: + raise NotImplementedError() + + @property + def wait(self) -> Callable[ + [compute.WaitRegionOperationRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'RegionOperationsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/transports/rest.py new file mode 100644 index 000000000..0903488fc --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_operations/transports/rest.py @@ -0,0 +1,660 @@ +# -*- 
coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionOperationsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionOperationsRestInterceptor: + """Interceptor for RegionOperations. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionOperationsRestTransport. + + .. code-block:: python + class MyCustomRegionOperationsInterceptor(RegionOperationsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_wait(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_wait(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionOperationsRestTransport(interceptor=MyCustomRegionOperationsInterceptor()) + client = RegionOperationsClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionOperations server. 
+ """ + return request, metadata + + def post_delete(self, response: compute.DeleteRegionOperationResponse) -> compute.DeleteRegionOperationResponse: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionOperations server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionOperations server. + """ + return request, metadata + + def post_get(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionOperations server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionOperationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionOperations server. + """ + return request, metadata + + def post_list(self, response: compute.OperationList) -> compute.OperationList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionOperations server but before + it is returned to user code. + """ + return response + def pre_wait(self, request: compute.WaitRegionOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.WaitRegionOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for wait + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionOperations server. 
+ """ + return request, metadata + + def post_wait(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for wait + + Override in a subclass to manipulate the response + after it is returned by the RegionOperations server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionOperationsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionOperationsRestInterceptor + + +class RegionOperationsRestTransport(RegionOperationsTransport): + """REST backend transport for RegionOperations. + + The RegionOperations API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionOperationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionOperationsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionOperationsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DeleteRegionOperationResponse: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionOperationRequest): + The request object. A request message for + RegionOperations.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.DeleteRegionOperationResponse: + A response message for + RegionOperations.Delete. See the method + description for details. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/operations/{operation}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.DeleteRegionOperationResponse() + pb_resp = compute.DeleteRegionOperationResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionOperationsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionOperationRequest): + The request object. A request message for + RegionOperations.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/operations/{operation}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(RegionOperationsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.OperationList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionOperationsRequest): + The request object. A request message for + RegionOperations.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.OperationList: + Contains a list of Operation + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/operations', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.OperationList() + pb_resp = compute.OperationList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Wait(RegionOperationsRestStub): + def __hash__(self): + return hash("Wait") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.WaitRegionOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the wait method over HTTP. + + Args: + request (~.compute.WaitRegionOperationRequest): + The request object. A request message for + RegionOperations.Wait. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/operations/{operation}/wait', + }, + ] + request, metadata = self._interceptor.pre_wait(request, metadata) + pb_request = compute.WaitRegionOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_wait(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionOperationRequest], + compute.DeleteRegionOperationResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionOperationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionOperationsRequest], + compute.OperationList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def wait(self) -> Callable[ + [compute.WaitRegionOperationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Wait(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionOperationsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/__init__.py new file mode 100644 index 000000000..4cc0fddc5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionSecurityPoliciesClient + +__all__ = ( + 'RegionSecurityPoliciesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/client.py new file mode 100644 index 000000000..aca1fcbe1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/client.py @@ -0,0 +1,1483 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_security_policies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionSecurityPoliciesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionSecurityPoliciesRestTransport + + +class RegionSecurityPoliciesClientMeta(type): + """Metaclass for the RegionSecurityPolicies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionSecurityPoliciesTransport]] + _transport_registry["rest"] = RegionSecurityPoliciesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionSecurityPoliciesTransport]: + """Returns an appropriate transport class. 
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class RegionSecurityPoliciesClient(metaclass=RegionSecurityPoliciesClientMeta):
+    """The RegionSecurityPolicies API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            RegionSecurityPoliciesClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionSecurityPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionSecurityPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionSecurityPoliciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionSecurityPoliciesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region security policies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionSecurityPoliciesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionSecurityPoliciesTransport): + # transport is a RegionSecurityPoliciesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + delete. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionSecurityPolicyRequest): + request = compute.DeleteRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + delete. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionSecurityPolicyRequest): + request = compute.DeleteRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SecurityPolicy: + r"""List all of the ordered rules present in a single + specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to get. + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SecurityPolicy: + Represents a Google Cloud Armor + security policy resource. Only external + backend services that use load balancers + can reference a security policy. For + more information, see Google Cloud Armor + security policy overview. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionSecurityPolicyRequest): + request = compute.GetRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionSecurityPolicyRequest): + request = compute.InsertRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Insert. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionSecurityPolicyRequest): + request = compute.InsertRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListRegionSecurityPoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""List all the policies that have been configured for + the specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionSecurityPoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionSecurityPoliciesRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_security_policies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionSecurityPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionSecurityPoliciesRequest): + request = compute.ListRegionSecurityPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified policy with the data included + in the request. To clear fields in the policy, leave the + fields empty and specify them in the updateMask. This + cannot be used to be update the rules in the policy. + Please use the per rule methods like addRule, patchRule, + and removeRule instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Patch. 
See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy, security_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.PatchRegionSecurityPolicyRequest): + request = compute.PatchRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchRegionSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified policy with the data included + in the request. To clear fields in the policy, leave the + fields empty and specify them in the updateMask. This + cannot be used to be update the rules in the policy. + Please use the per rule methods like addRule, patchRule, + and removeRule instead. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest, dict]): + The request object. A request message for + RegionSecurityPolicies.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, security_policy, security_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionSecurityPolicyRequest): + request = compute.PatchRegionSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if security_policy is not None: + request.security_policy = security_policy + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionSecurityPoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionSecurityPoliciesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/pagers.py new file mode 100644 index 000000000..78bbc65b3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SecurityPolicyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SecurityPolicyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.SecurityPolicyList], + request: compute.ListRegionSecurityPoliciesRequest, + response: compute.SecurityPolicyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionSecurityPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SecurityPolicyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionSecurityPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SecurityPolicyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.SecurityPolicy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/__init__.py new file mode 100644 index 000000000..290019136 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionSecurityPoliciesTransport +from .rest import RegionSecurityPoliciesRestTransport +from .rest import RegionSecurityPoliciesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionSecurityPoliciesTransport]] +_transport_registry['rest'] = RegionSecurityPoliciesRestTransport + +__all__ = ( + 'RegionSecurityPoliciesTransport', + 'RegionSecurityPoliciesRestTransport', + 'RegionSecurityPoliciesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/base.py new file mode 100644 index 000000000..ebf77661d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionSecurityPoliciesTransport(abc.ABC): + """Abstract transport class for RegionSecurityPolicies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionSecurityPolicyRequest], + Union[ + compute.SecurityPolicy, + Awaitable[compute.SecurityPolicy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionSecurityPoliciesRequest], + Union[ + compute.SecurityPolicyList, + Awaitable[compute.SecurityPolicyList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + 
raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionSecurityPoliciesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/rest.py new file mode 100644 index 000000000..bb948c214 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_security_policies/transports/rest.py @@ -0,0 +1,804 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionSecurityPoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionSecurityPoliciesRestInterceptor: + """Interceptor for RegionSecurityPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionSecurityPoliciesRestTransport. + + .. 
code-block:: python + class MyCustomRegionSecurityPoliciesInterceptor(RegionSecurityPoliciesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionSecurityPoliciesRestTransport(interceptor=MyCustomRegionSecurityPoliciesInterceptor()) + client = RegionSecurityPoliciesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. 
+ """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. + """ + return request, metadata + + def post_get(self, response: compute.SecurityPolicy) -> compute.SecurityPolicy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListRegionSecurityPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionSecurityPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. + """ + return request, metadata + + def post_list(self, response: compute.SecurityPolicyList) -> compute.SecurityPolicyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRegionSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSecurityPolicies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionSecurityPolicies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionSecurityPoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionSecurityPoliciesRestInterceptor + + +class RegionSecurityPoliciesRestTransport(RegionSecurityPoliciesTransport): + """REST backend transport for RegionSecurityPolicies. + + The RegionSecurityPolicies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionSecurityPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionSecurityPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionSecurityPolicyRequest): + The request object. A request message for + RegionSecurityPolicies.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SecurityPolicy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionSecurityPolicyRequest): + The request object. A request message for + RegionSecurityPolicies.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SecurityPolicy: + Represents a Google Cloud Armor + security policy resource. Only external + backend services that use load balancers + can reference a security policy. For + more information, see Google Cloud Armor + security policy overview. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SecurityPolicy() + pb_resp = compute.SecurityPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionSecurityPolicyRequest): + The request object. A request message for + RegionSecurityPolicies.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/securityPolicies', + 'body': 'security_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionSecurityPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SecurityPolicyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionSecurityPoliciesRequest): + The request object. A request message for + RegionSecurityPolicies.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SecurityPolicyList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/securityPolicies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionSecurityPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SecurityPolicyList() + pb_resp = compute.SecurityPolicyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RegionSecurityPoliciesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionSecurityPolicyRequest): + The request object. A request message for + RegionSecurityPolicies.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}', + 'body': 'security_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionSecurityPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionSecurityPolicyRequest], + compute.SecurityPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionSecurityPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionSecurityPoliciesRequest], + compute.SecurityPolicyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionSecurityPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionSecurityPoliciesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/__init__.py new file mode 100644 index 000000000..4e614123d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionSslCertificatesClient + +__all__ = ( + 'RegionSslCertificatesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/client.py new file mode 100644 index 000000000..061b0d933 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/client.py @@ -0,0 +1,1204 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import os
import re
from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast

from google.cloud.compute_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

# Older versions of google-api-core do not expose ``_MethodDefault``; fall
# back to a plain ``object`` sentinel in the retry union in that case.
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.cloud.compute_v1.services.region_ssl_certificates import pagers
from google.cloud.compute_v1.types import compute
from .transports.base import RegionSslCertificatesTransport, DEFAULT_CLIENT_INFO
from .transports.rest import RegionSslCertificatesRestTransport


class RegionSslCertificatesClientMeta(type):
    """Metaclass for the RegionSslCertificates client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    # Registry of transport label -> transport class.  "rest" is the only
    # entry, and therefore also the default returned below.
    _transport_registry = OrderedDict()  # type: Dict[str, Type[RegionSslCertificatesTransport]]
    _transport_registry["rest"] = RegionSslCertificatesRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
        ) -> Type[RegionSslCertificatesTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class RegionSslCertificatesClient(metaclass=RegionSslCertificatesClientMeta):
    """The RegionSslCertificates API."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.

        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # FIX: the named groups had been stripped of their "<name>" tags
        # (the source read "(?P[^.]+)"), which is invalid regex syntax and
        # would raise re.error at class-definition (i.e. import) time when
        # DEFAULT_MTLS_ENDPOINT is computed below.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        # Already an mTLS endpoint, or not a googleapis.com host: leave as-is.
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "compute.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            RegionSslCertificatesClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            RegionSslCertificatesClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    # Alias kept for backward compatibility with older generated surfaces.
    from_service_account_json = from_service_account_file
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionSslCertificatesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region ssl certificates client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionSslCertificatesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionSslCertificatesTransport): + # transport is a RegionSslCertificatesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + ssl_certificate: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified SslCertificate resource in the + region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSslCertificateRequest( + project="project_value", + region="region_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionSslCertificateRequest, dict]): + The request object. A request message for + RegionSslCertificates.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_certificate (str): + Name of the SslCertificate resource + to delete. + + This corresponds to the ``ssl_certificate`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, ssl_certificate]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionSslCertificateRequest): + request = compute.DeleteRegionSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if ssl_certificate is not None: + request.ssl_certificate = ssl_certificate + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("ssl_certificate", request.ssl_certificate), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + ssl_certificate: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified SslCertificate resource in the + region. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSslCertificateRequest( + project="project_value", + region="region_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionSslCertificateRequest, dict]): + The request object. A request message for + RegionSslCertificates.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_certificate (str): + Name of the SslCertificate resource + to delete. + + This corresponds to the ``ssl_certificate`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, ssl_certificate]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionSslCertificateRequest): + request = compute.DeleteRegionSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if ssl_certificate is not None: + request.ssl_certificate = ssl_certificate + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("ssl_certificate", request.ssl_certificate), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + ssl_certificate: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SslCertificate: + r"""Returns the specified SslCertificate resource in the + specified region. Get a list of available SSL + certificates by making a list() request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSslCertificateRequest( + project="project_value", + region="region_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionSslCertificateRequest, dict]): + The request object. A request message for + RegionSslCertificates.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_certificate (str): + Name of the SslCertificate resource + to return. + + This corresponds to the ``ssl_certificate`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SslCertificate: + Represents an SSL Certificate resource. 
Google Compute + Engine has two SSL Certificate resources: \* + [Global](/compute/docs/reference/rest/v1/sslCertificates) + \* + [Regional](/compute/docs/reference/rest/v1/regionSslCertificates) + The sslCertificates are used by: - external HTTPS load + balancers - SSL proxy load balancers The + regionSslCertificates are used by internal HTTPS load + balancers. Optionally, certificate file contents that + you upload can contain a set of up to five PEM-encoded + certificates. The API call creates an object + (sslCertificate) that holds this data. You can use SSL + keys and certificates to secure connections to a load + balancer. For more information, read Creating and using + SSL certificates, SSL certificates quotas and limits, + and Troubleshooting SSL certificates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, ssl_certificate]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionSslCertificateRequest): + request = compute.GetRegionSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if ssl_certificate is not None: + request.ssl_certificate = ssl_certificate + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("ssl_certificate", request.ssl_certificate), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + ssl_certificate_resource: Optional[compute.SslCertificate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a SslCertificate resource in the specified + project and region using the data included in the + request + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSslCertificateRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionSslCertificateRequest, dict]): + The request object. A request message for + RegionSslCertificates.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): + The body resource for this request + This corresponds to the ``ssl_certificate_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, ssl_certificate_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionSslCertificateRequest): + request = compute.InsertRegionSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if ssl_certificate_resource is not None: + request.ssl_certificate_resource = ssl_certificate_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertRegionSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + ssl_certificate_resource: Optional[compute.SslCertificate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a SslCertificate resource in the specified + project and region using the data included in the + request + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSslCertificateRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionSslCertificateRequest, dict]): + The request object. A request message for + RegionSslCertificates.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): + The body resource for this request + This corresponds to the ``ssl_certificate_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, ssl_certificate_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionSslCertificateRequest): + request = compute.InsertRegionSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if ssl_certificate_resource is not None: + request.ssl_certificate_resource = ssl_certificate_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionSslCertificatesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of SslCertificate resources + available to the specified project in the specified + region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionSslCertificatesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionSslCertificatesRequest, dict]): + The request object. A request message for + RegionSslCertificates.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_ssl_certificates.pagers.ListPager: + Contains a list of SslCertificate + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionSslCertificatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionSslCertificatesRequest): + request = compute.ListRegionSslCertificatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionSslCertificatesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionSslCertificatesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/pagers.py new file mode 100644 index 000000000..10d6a83d8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SslCertificateList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SslCertificateList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.SslCertificateList], + request: compute.ListRegionSslCertificatesRequest, + response: compute.SslCertificateList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionSslCertificatesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SslCertificateList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionSslCertificatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SslCertificateList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.SslCertificate]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/__init__.py new file mode 100644 index 000000000..03f0fcdf7 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionSslCertificatesTransport +from .rest import RegionSslCertificatesRestTransport +from .rest import RegionSslCertificatesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionSslCertificatesTransport]] +_transport_registry['rest'] = RegionSslCertificatesRestTransport + +__all__ = ( + 'RegionSslCertificatesTransport', + 'RegionSslCertificatesRestTransport', + 'RegionSslCertificatesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/base.py new file mode 100644 index 000000000..cabd5ab1a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/base.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionSslCertificatesTransport(abc.ABC): + """Abstract transport class for RegionSslCertificates.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionSslCertificateRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionSslCertificateRequest], + Union[ + compute.SslCertificate, + Awaitable[compute.SslCertificate] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionSslCertificateRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionSslCertificatesRequest], + Union[ + compute.SslCertificateList, + Awaitable[compute.SslCertificateList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + 
credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionSslCertificatesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py new file mode 100644 index 000000000..1991eafde --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py @@ -0,0 +1,682 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionSslCertificatesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionSslCertificatesRestInterceptor: + """Interceptor for RegionSslCertificates. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionSslCertificatesRestTransport. + + .. 
code-block:: python + class MyCustomRegionSslCertificatesInterceptor(RegionSslCertificatesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionSslCertificatesRestTransport(interceptor=MyCustomRegionSslCertificatesInterceptor()) + client = RegionSslCertificatesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionSslCertificateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionSslCertificateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslCertificates server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionSslCertificates server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetRegionSslCertificateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionSslCertificateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslCertificates server. + """ + return request, metadata + + def post_get(self, response: compute.SslCertificate) -> compute.SslCertificate: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionSslCertificates server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionSslCertificateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionSslCertificateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslCertificates server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionSslCertificates server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionSslCertificatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionSslCertificatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslCertificates server. 
+ """ + return request, metadata + + def post_list(self, response: compute.SslCertificateList) -> compute.SslCertificateList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionSslCertificates server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionSslCertificatesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionSslCertificatesRestInterceptor + + +class RegionSslCertificatesRestTransport(RegionSslCertificatesTransport): + """REST backend transport for RegionSslCertificates. + + The RegionSslCertificates API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionSslCertificatesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionSslCertificatesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionSslCertificatesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionSslCertificateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionSslCertificateRequest): + The request object. A request message for + RegionSslCertificates.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionSslCertificateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionSslCertificatesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionSslCertificateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslCertificate: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionSslCertificateRequest): + The request object. A request message for + RegionSslCertificates.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SslCertificate: + Represents an SSL Certificate resource. Google Compute + Engine has two SSL Certificate resources: \* + `Global `__ + \* + `Regional `__ + The sslCertificates are used by: - external HTTPS load + balancers - SSL proxy load balancers The + regionSslCertificates are used by internal HTTPS load + balancers. Optionally, certificate file contents that + you upload can contain a set of up to five PEM-encoded + certificates. The API call creates an object + (sslCertificate) that holds this data. You can use SSL + keys and certificates to secure connections to a load + balancer. 
For more information, read Creating and using + SSL certificates, SSL certificates quotas and limits, + and Troubleshooting SSL certificates. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionSslCertificateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslCertificate() + pb_resp = compute.SslCertificate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionSslCertificatesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionSslCertificateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionSslCertificateRequest): + The request object. A request message for + RegionSslCertificates.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslCertificates', + 'body': 'ssl_certificate_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionSslCertificateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionSslCertificatesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionSslCertificatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslCertificateList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionSslCertificatesRequest): + The request object. A request message for + RegionSslCertificates.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SslCertificateList: + Contains a list of SslCertificate + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslCertificates', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionSslCertificatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslCertificateList() + pb_resp = compute.SslCertificateList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionSslCertificateRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+        # In C++ this would require a dynamic_cast
+        return self._Delete(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get(self) -> Callable[
+            [compute.GetRegionSslCertificateRequest],
+            compute.SslCertificate]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._Get(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def insert(self) -> Callable[
+            [compute.InsertRegionSslCertificateRequest],
+            compute.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._Insert(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def list(self) -> Callable[
+            [compute.ListRegionSslCertificatesRequest],
+            compute.SslCertificateList]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._List(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def kind(self) -> str:
+        # Name of this transport flavor.
+        return "rest"
+
+    def close(self):
+        # Release the underlying HTTP session.
+        self._session.close()
+
+
+__all__=(
+    'RegionSslCertificatesRestTransport',
+)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/__init__.py
new file mode 100644
index 000000000..31ece0497
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/__init__.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionSslPoliciesClient + +__all__ = ( + 'RegionSslPoliciesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/client.py new file mode 100644 index 000000000..90c6fe1aa --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/client.py @@ -0,0 +1,1597 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import functools
+import os
+import re
+from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+
+from google.cloud.compute_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import extended_operation
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials                 # type: ignore
+from google.auth.transport import mtls                            # type: ignore
+from google.auth.transport.grpc import SslCredentials             # type: ignore
+from google.auth.exceptions import MutualTLSChannelError          # type: ignore
+from google.oauth2 import service_account                         # type: ignore
+
+# `_MethodDefault` only exists in newer google-api-core releases; on older
+# versions fall back to a plain `object` sentinel in the union.
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+# NOTE(review): duplicate of the `extended_operation` import above — harmless
+# generator artifact, left in place because this file is generated.
+from google.api_core import extended_operation  # type: ignore
+from google.cloud.compute_v1.services.region_ssl_policies import pagers
+from google.cloud.compute_v1.types import compute
+from .transports.base import RegionSslPoliciesTransport, DEFAULT_CLIENT_INFO
+from .transports.rest import RegionSslPoliciesRestTransport
+
+
+class RegionSslPoliciesClientMeta(type):
+    """Metaclass for the RegionSslPolicies client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    # Maps transport name -> transport class; "rest" is the only entry and
+    # therefore also the default returned by get_transport_class.
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[RegionSslPoliciesTransport]]
+    _transport_registry["rest"] = RegionSslPoliciesRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[RegionSslPoliciesTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport.
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionSslPoliciesClient(metaclass=RegionSslPoliciesClientMeta): + """The RegionSslPolicies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionSslPoliciesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionSslPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionSslPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionSslPoliciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionSslPoliciesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region ssl policies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionSslPoliciesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionSslPoliciesTransport): + # transport is a RegionSslPoliciesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionSslPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + ssl_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified SSL policy. The SSL policy + resource can be deleted only if it is not in use by any + TargetHttpsProxy or TargetSslProxy resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionSslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSslPolicyRequest( + project="project_value", + region="region_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionSslPolicyRequest, dict]): + The request object. A request message for + RegionSslPolicies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy (str): + Name of the SSL policy to delete. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``ssl_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, ssl_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionSslPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionSslPolicyRequest): + request = compute.DeleteRegionSslPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if ssl_policy is not None: + request.ssl_policy = ssl_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("ssl_policy", request.ssl_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionSslPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + ssl_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified SSL policy. 
The SSL policy + resource can be deleted only if it is not in use by any + TargetHttpsProxy or TargetSslProxy resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionSslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSslPolicyRequest( + project="project_value", + region="region_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionSslPolicyRequest, dict]): + The request object. A request message for + RegionSslPolicies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy (str): + Name of the SSL policy to delete. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``ssl_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, ssl_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionSslPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionSslPolicyRequest): + request = compute.DeleteRegionSslPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if ssl_policy is not None: + request.ssl_policy = ssl_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("ssl_policy", request.ssl_policy), + )), + ) + + # Send the request. 
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # `response` here is the raw compute.Operation returned by the Delete
        # call; below it is wrapped so callers can poll the matching
        # RegionOperation (identified by `response.name`) to completion.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def get(self,
            request: Optional[Union[compute.GetRegionSslPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            ssl_policy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.SslPolicy:
        r"""Lists all of the ordered rules present in a single
        specified policy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.RegionSslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.GetRegionSslPolicyRequest(
                    project="project_value",
                    region="region_value",
                    ssl_policy="ssl_policy_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetRegionSslPolicyRequest, dict]):
                The request object. A request message for
                RegionSslPolicies.Get. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            ssl_policy (str):
                Name of the SSL policy to update. The
                name must be 1-63 characters long, and
                comply with RFC1035.

                This corresponds to the ``ssl_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.SslPolicy:
                Represents an SSL Policy resource.
                Use SSL policies to control the SSL
                features, such as versions and cipher
                suites, offered by an HTTPS or SSL Proxy
                load balancer. For more information,
                read SSL Policy Concepts.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, ssl_policy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetRegionSslPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetRegionSslPolicyRequest):
            request = compute.GetRegionSslPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if ssl_policy is not None:
                request.ssl_policy = ssl_policy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("ssl_policy", request.ssl_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert_unary(self,
            request: Optional[Union[compute.InsertRegionSslPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            ssl_policy_resource: Optional[compute.SslPolicy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates a new policy in the specified project and
        region using the data included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.RegionSslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertRegionSslPolicyRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionSslPolicyRequest, dict]):
                The request object. A request message for
                RegionSslPolicies.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy):
                The body resource for this request
                This corresponds to the ``ssl_policy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation returned by the API.
                Unlike :meth:`insert`, this method does
                not wrap the result in a pollable
                ``ExtendedOperation``.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, ssl_policy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertRegionSslPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertRegionSslPolicyRequest):
            request = compute.InsertRegionSslPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if ssl_policy_resource is not None:
                request.ssl_policy_resource = ssl_policy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert(self,
            request: Optional[Union[compute.InsertRegionSslPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            ssl_policy_resource: Optional[compute.SslPolicy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Creates a new policy in the specified project and
        region using the data included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.RegionSslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertRegionSslPolicyRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionSslPolicyRequest, dict]):
                The request object. A request message for
                RegionSslPolicies.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy):
                The body resource for this request
                This corresponds to the ``ssl_policy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, ssl_policy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertRegionSslPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertRegionSslPolicyRequest):
            request = compute.InsertRegionSslPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if ssl_policy_resource is not None:
                request.ssl_policy_resource = ssl_policy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw compute.Operation so callers can poll the matching
        # RegionOperation (identified by `response.name`) to completion.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def list(self,
            request: Optional[Union[compute.ListRegionSslPoliciesRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListPager:
        r"""Lists all the SSL policies that have been configured
        for the specified project and region.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_list():
                # Create a client
                client = compute_v1.RegionSslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.ListRegionSslPoliciesRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                page_result = client.list(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.ListRegionSslPoliciesRequest, dict]):
                The request object. A request message for
                RegionSslPolicies.List. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.services.region_ssl_policies.pagers.ListPager:
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.ListRegionSslPoliciesRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.ListRegionSslPoliciesRequest):
            request = compute.ListRegionSslPoliciesRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method. The same wrapped RPC and metadata
        # are reused by the pager to fetch subsequent pages on demand.
        response = pagers.ListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def list_available_features(self,
            request: Optional[Union[compute.ListAvailableFeaturesRegionSslPoliciesRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.SslPoliciesListAvailableFeaturesResponse:
        r"""Lists all features that can be specified in the SSL
        policy when using custom profile.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_list_available_features():
                # Create a client
                client = compute_v1.RegionSslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.ListAvailableFeaturesRegionSslPoliciesRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.list_available_features(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.ListAvailableFeaturesRegionSslPoliciesRequest, dict]):
                The request object. A request message for
                RegionSslPolicies.ListAvailableFeatures.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.SslPoliciesListAvailableFeaturesResponse:

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.ListAvailableFeaturesRegionSslPoliciesRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.ListAvailableFeaturesRegionSslPoliciesRequest):
            request = compute.ListAvailableFeaturesRegionSslPoliciesRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list_available_features]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def patch_unary(self,
            request: Optional[Union[compute.PatchRegionSslPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            ssl_policy: Optional[str] = None,
            ssl_policy_resource: Optional[compute.SslPolicy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Patches the specified SSL policy with the data
        included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.RegionSslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.PatchRegionSslPolicyRequest(
                    project="project_value",
                    region="region_value",
                    ssl_policy="ssl_policy_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchRegionSslPolicyRequest, dict]):
                The request object. A request message for
                RegionSslPolicies.Patch. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            ssl_policy (str):
                Name of the SSL policy to update. The
                name must be 1-63 characters long, and
                comply with RFC1035.

                This corresponds to the ``ssl_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy):
                The body resource for this request
                This corresponds to the ``ssl_policy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation returned by the API.
                Unlike :meth:`patch`, this method does
                not wrap the result in a pollable
                ``ExtendedOperation``.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, ssl_policy, ssl_policy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchRegionSslPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchRegionSslPolicyRequest):
            request = compute.PatchRegionSslPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if ssl_policy is not None:
                request.ssl_policy = ssl_policy
            if ssl_policy_resource is not None:
                request.ssl_policy_resource = ssl_policy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("ssl_policy", request.ssl_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def patch(self,
            request: Optional[Union[compute.PatchRegionSslPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            ssl_policy: Optional[str] = None,
            ssl_policy_resource: Optional[compute.SslPolicy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Patches the specified SSL policy with the data
        included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.RegionSslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.PatchRegionSslPolicyRequest(
                    project="project_value",
                    region="region_value",
                    ssl_policy="ssl_policy_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchRegionSslPolicyRequest, dict]):
                The request object. A request message for
                RegionSslPolicies.Patch. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            ssl_policy (str):
                Name of the SSL policy to update. The
                name must be 1-63 characters long, and
                comply with RFC1035.

                This corresponds to the ``ssl_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy):
                The body resource for this request
                This corresponds to the ``ssl_policy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, ssl_policy, ssl_policy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchRegionSslPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchRegionSslPolicyRequest):
            request = compute.PatchRegionSslPolicyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if ssl_policy is not None:
                request.ssl_policy = ssl_policy
            if ssl_policy_resource is not None:
                request.ssl_policy_resource = ssl_policy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("ssl_policy", request.ssl_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw compute.Operation so callers can poll the matching
        # RegionOperation (identified by `response.name`) to completion.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def __enter__(self) -> "RegionSslPoliciesClient":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionSslPoliciesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/pagers.py new file mode 100644 index 000000000..f2e847a37 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SslPoliciesList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SslPoliciesList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.SslPoliciesList], + request: compute.ListRegionSslPoliciesRequest, + response: compute.SslPoliciesList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionSslPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SslPoliciesList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionSslPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SslPoliciesList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.SslPolicy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/__init__.py new file mode 100644 index 000000000..9bf3e29bf --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionSslPoliciesTransport +from .rest import RegionSslPoliciesRestTransport +from .rest import RegionSslPoliciesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionSslPoliciesTransport]] +_transport_registry['rest'] = RegionSslPoliciesRestTransport + +__all__ = ( + 'RegionSslPoliciesTransport', + 'RegionSslPoliciesRestTransport', + 'RegionSslPoliciesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/base.py new file mode 100644 index 000000000..031337a47 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionSslPoliciesTransport(abc.ABC): + """Abstract transport class for RegionSslPolicies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_available_features: gapic_v1.method.wrap_method( + self.list_available_features, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionSslPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionSslPolicyRequest], + Union[ + compute.SslPolicy, + Awaitable[compute.SslPolicy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionSslPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionSslPoliciesRequest], + Union[ + compute.SslPoliciesList, + Awaitable[compute.SslPoliciesList] + ]]: + raise NotImplementedError() + + @property + def list_available_features(self) -> Callable[ + [compute.ListAvailableFeaturesRegionSslPoliciesRequest], + Union[ + compute.SslPoliciesListAvailableFeaturesResponse, + Awaitable[compute.SslPoliciesListAvailableFeaturesResponse] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionSslPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionSslPoliciesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py new file mode 
100644 index 000000000..f50c79208 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py @@ -0,0 +1,914 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionSslPoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + 
grpc_version=None, + rest_version=requests_version, +) + + +class RegionSslPoliciesRestInterceptor: + """Interceptor for RegionSslPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionSslPoliciesRestTransport. + + .. code-block:: python + class MyCustomRegionSslPoliciesInterceptor(RegionSslPoliciesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_available_features(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_available_features(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response 
+ + transport = RegionSslPoliciesRestTransport(interceptor=MyCustomRegionSslPoliciesInterceptor()) + client = RegionSslPoliciesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionSslPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionSslPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslPolicies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionSslPolicies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionSslPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionSslPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslPolicies server. + """ + return request, metadata + + def post_get(self, response: compute.SslPolicy) -> compute.SslPolicy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionSslPolicies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionSslPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionSslPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslPolicies server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionSslPolicies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionSslPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionSslPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslPolicies server. + """ + return request, metadata + + def post_list(self, response: compute.SslPoliciesList) -> compute.SslPoliciesList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionSslPolicies server but before + it is returned to user code. + """ + return response + def pre_list_available_features(self, request: compute.ListAvailableFeaturesRegionSslPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListAvailableFeaturesRegionSslPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_available_features + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslPolicies server. + """ + return request, metadata + + def post_list_available_features(self, response: compute.SslPoliciesListAvailableFeaturesResponse) -> compute.SslPoliciesListAvailableFeaturesResponse: + """Post-rpc interceptor for list_available_features + + Override in a subclass to manipulate the response + after it is returned by the RegionSslPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchRegionSslPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionSslPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSslPolicies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionSslPolicies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionSslPoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionSslPoliciesRestInterceptor + + +class RegionSslPoliciesRestTransport(RegionSslPoliciesTransport): + """REST backend transport for RegionSslPolicies. + + The RegionSslPolicies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionSslPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionSslPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionSslPoliciesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionSslPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionSslPolicyRequest): + The request object. A request message for + RegionSslPolicies.Delete. 
See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslPolicies/{ssl_policy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionSslPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionSslPoliciesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionSslPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslPolicy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionSslPolicyRequest): + The request object. A request message for + RegionSslPolicies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SslPolicy: + Represents an SSL Policy resource. + Use SSL policies to control the SSL + features, such as versions and cipher + suites, offered by an HTTPS or SSL Proxy + load balancer. For more information, + read SSL Policy Concepts. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslPolicies/{ssl_policy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionSslPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslPolicy() + pb_resp = compute.SslPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionSslPoliciesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionSslPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionSslPolicyRequest): + The request object. A request message for + RegionSslPolicies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslPolicies', + 'body': 'ssl_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionSslPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionSslPoliciesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionSslPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslPoliciesList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionSslPoliciesRequest): + The request object. A request message for + RegionSslPolicies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SslPoliciesList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslPolicies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionSslPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslPoliciesList() + pb_resp = compute.SslPoliciesList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListAvailableFeatures(RegionSslPoliciesRestStub): + def __hash__(self): + return hash("ListAvailableFeatures") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListAvailableFeaturesRegionSslPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslPoliciesListAvailableFeaturesResponse: + r"""Call the list available features method over HTTP. + + Args: + request (~.compute.ListAvailableFeaturesRegionSslPoliciesRequest): + The request object. A request message for + RegionSslPolicies.ListAvailableFeatures. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SslPoliciesListAvailableFeaturesResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslPolicies/listAvailableFeatures', + }, + ] + request, metadata = self._interceptor.pre_list_available_features(request, metadata) + pb_request = compute.ListAvailableFeaturesRegionSslPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslPoliciesListAvailableFeaturesResponse() + pb_resp = compute.SslPoliciesListAvailableFeaturesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_available_features(resp) + return resp + + class _Patch(RegionSslPoliciesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionSslPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionSslPolicyRequest): + The request object. A request message for + RegionSslPolicies.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/sslPolicies/{ssl_policy}', + 'body': 'ssl_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionSslPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionSslPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionSslPolicyRequest], + compute.SslPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionSslPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionSslPoliciesRequest], + compute.SslPoliciesList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_available_features(self) -> Callable[ + [compute.ListAvailableFeaturesRegionSslPoliciesRequest], + compute.SslPoliciesListAvailableFeaturesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAvailableFeatures(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionSslPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionSslPoliciesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/__init__.py new file mode 100644 index 000000000..cbb2f7b34 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionTargetHttpProxiesClient + +__all__ = ( + 'RegionTargetHttpProxiesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/client.py new file mode 100644 index 000000000..cefbc2748 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/client.py @@ -0,0 +1,1487 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_target_http_proxies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionTargetHttpProxiesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionTargetHttpProxiesRestTransport + + +class RegionTargetHttpProxiesClientMeta(type): + """Metaclass for the RegionTargetHttpProxies client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionTargetHttpProxiesTransport]] + _transport_registry["rest"] = RegionTargetHttpProxiesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionTargetHttpProxiesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionTargetHttpProxiesClient(metaclass=RegionTargetHttpProxiesClientMeta): + """The RegionTargetHttpProxies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionTargetHttpProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionTargetHttpProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionTargetHttpProxiesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionTargetHttpProxiesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionTargetHttpProxiesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region target http proxies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionTargetHttpProxiesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionTargetHttpProxiesTransport): + # transport is a RegionTargetHttpProxiesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_http_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified TargetHttpProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionTargetHttpProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpProxies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy resource + to delete. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, target_http_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionTargetHttpProxyRequest): + request = compute.DeleteRegionTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_http_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified TargetHttpProxy resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionTargetHttpProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpProxies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy resource + to delete. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_http_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionTargetHttpProxyRequest): + request = compute.DeleteRegionTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_http_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetHttpProxy: + r"""Returns the specified TargetHttpProxy resource in the + specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionTargetHttpProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpProxies.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy resource + to return. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetHttpProxy: + Represents a Target HTTP Proxy resource. 
Google Compute + Engine has two Target HTTP Proxy resources: \* + [Global](/compute/docs/reference/rest/v1/targetHttpProxies) + \* + [Regional](/compute/docs/reference/rest/v1/regionTargetHttpProxies) + A target HTTP proxy is a component of GCP HTTP load + balancers. \* targetHttpProxies are used by external + HTTP load balancers and Traffic Director. \* + regionTargetHttpProxies are used by internal HTTP load + balancers. Forwarding rules reference a target HTTP + proxy, and the target proxy then references a URL map. + For more information, read Using Target Proxies and + Forwarding rule concepts. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_http_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionTargetHttpProxyRequest): + request = compute.GetRegionTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_http_proxy_resource: Optional[compute.TargetHttpProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a TargetHttpProxy resource in the specified + project and region using the data included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionTargetHttpProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpProxies.Insert. See the + method description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): + The body resource for this request + This corresponds to the ``target_http_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_http_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionTargetHttpProxyRequest): + request = compute.InsertRegionTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_http_proxy_resource is not None: + request.target_http_proxy_resource = target_http_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_http_proxy_resource: Optional[compute.TargetHttpProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a TargetHttpProxy resource in the specified + project and region using the data included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionTargetHttpProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpProxies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): + The body resource for this request + This corresponds to the ``target_http_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, target_http_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionTargetHttpProxyRequest): + request = compute.InsertRegionTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_http_proxy_resource is not None: + request.target_http_proxy_resource = target_http_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionTargetHttpProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of TargetHttpProxy resources + available to the specified project in the specified + region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionTargetHttpProxiesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionTargetHttpProxiesRequest, dict]): + The request object. A request message for + RegionTargetHttpProxies.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_target_http_proxies.pagers.ListPager: + A list of TargetHttpProxy resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionTargetHttpProxiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionTargetHttpProxiesRequest): + request = compute.ListRegionTargetHttpProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_url_map_unary(self, + request: Optional[Union[compute.SetUrlMapRegionTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_http_proxy: Optional[str] = None, + url_map_reference_resource: Optional[compute.UrlMapReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the URL map for TargetHttpProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_url_map(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpProxies.SetUrlMap. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy to set a + URL map for. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + This corresponds to the ``url_map_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_http_proxy, url_map_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetUrlMapRegionTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetUrlMapRegionTargetHttpProxyRequest): + request = compute.SetUrlMapRegionTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + if url_map_reference_resource is not None: + request.url_map_reference_resource = url_map_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_url_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_url_map(self, + request: Optional[Union[compute.SetUrlMapRegionTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_http_proxy: Optional[str] = None, + url_map_reference_resource: Optional[compute.UrlMapReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the URL map for TargetHttpProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_url_map(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpProxies.SetUrlMap. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy to set a + URL map for. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + This corresponds to the ``url_map_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_http_proxy, url_map_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetUrlMapRegionTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetUrlMapRegionTargetHttpProxyRequest): + request = compute.SetUrlMapRegionTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + if url_map_reference_resource is not None: + request.url_map_reference_resource = url_map_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_url_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionTargetHttpProxiesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionTargetHttpProxiesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/pagers.py new file mode 100644 index 000000000..fc0c638fa --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetHttpProxyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetHttpProxyList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetHttpProxyList], + request: compute.ListRegionTargetHttpProxiesRequest, + response: compute.TargetHttpProxyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionTargetHttpProxiesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetHttpProxyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionTargetHttpProxiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetHttpProxyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetHttpProxy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/__init__.py new file mode 100644 index 000000000..d96edc3db --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionTargetHttpProxiesTransport +from .rest import RegionTargetHttpProxiesRestTransport +from .rest import RegionTargetHttpProxiesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionTargetHttpProxiesTransport]] +_transport_registry['rest'] = RegionTargetHttpProxiesRestTransport + +__all__ = ( + 'RegionTargetHttpProxiesTransport', + 'RegionTargetHttpProxiesRestTransport', + 'RegionTargetHttpProxiesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/base.py new file mode 100644 index 000000000..1131f0c13 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionTargetHttpProxiesTransport(abc.ABC): + """Abstract transport class for RegionTargetHttpProxies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_url_map: gapic_v1.method.wrap_method( + self.set_url_map, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionTargetHttpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionTargetHttpProxyRequest], + Union[ + compute.TargetHttpProxy, + Awaitable[compute.TargetHttpProxy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionTargetHttpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionTargetHttpProxiesRequest], + Union[ + compute.TargetHttpProxyList, + Awaitable[compute.TargetHttpProxyList] + ]]: + raise NotImplementedError() + + @property + def set_url_map(self) -> Callable[ + [compute.SetUrlMapRegionTargetHttpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionTargetHttpProxiesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py new file mode 100644 index 000000000..4449b7763 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py @@ -0,0 +1,811 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionTargetHttpProxiesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionTargetHttpProxiesRestInterceptor: + """Interceptor for RegionTargetHttpProxies. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionTargetHttpProxiesRestTransport. + + .. code-block:: python + class MyCustomRegionTargetHttpProxiesInterceptor(RegionTargetHttpProxiesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_url_map(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_url_map(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionTargetHttpProxiesRestTransport(interceptor=MyCustomRegionTargetHttpProxiesInterceptor()) + client = RegionTargetHttpProxiesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionTargetHttpProxyRequest, 
Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpProxies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpProxies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionTargetHttpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpProxies server. + """ + return request, metadata + + def post_get(self, response: compute.TargetHttpProxy) -> compute.TargetHttpProxy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpProxies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionTargetHttpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpProxies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpProxies server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListRegionTargetHttpProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionTargetHttpProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpProxies server. + """ + return request, metadata + + def post_list(self, response: compute.TargetHttpProxyList) -> compute.TargetHttpProxyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpProxies server but before + it is returned to user code. + """ + return response + def pre_set_url_map(self, request: compute.SetUrlMapRegionTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetUrlMapRegionTargetHttpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_url_map + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpProxies server. + """ + return request, metadata + + def post_set_url_map(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_url_map + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpProxies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionTargetHttpProxiesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionTargetHttpProxiesRestInterceptor + + +class RegionTargetHttpProxiesRestTransport(RegionTargetHttpProxiesTransport): + """REST backend transport for RegionTargetHttpProxies. + + The RegionTargetHttpProxies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionTargetHttpProxiesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST)
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or RegionTargetHttpProxiesRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _Delete(RegionTargetHttpProxiesRestStub):
+ def __hash__(self):
+ return hash("Delete")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+ def __call__(self,
+ request: compute.DeleteRegionTargetHttpProxyRequest, *,
+ retry: OptionalRetry=gapic_v1.method.DEFAULT,
+ timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionTargetHttpProxyRequest): + The request object. A request message for + RegionTargetHttpProxies.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionTargetHttpProxiesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionTargetHttpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpProxy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionTargetHttpProxyRequest): + The request object. A request message for + RegionTargetHttpProxies.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpProxy: + Represents a Target HTTP Proxy resource. Google Compute + Engine has two Target HTTP Proxy resources: \* + `Global `__ + \* + `Regional `__ + A target HTTP proxy is a component of GCP HTTP load + balancers. \* targetHttpProxies are used by external + HTTP load balancers and Traffic Director. \* + regionTargetHttpProxies are used by internal HTTP load + balancers. Forwarding rules reference a target HTTP + proxy, and the target proxy then references a URL map. + For more information, read Using Target Proxies and + Forwarding rule concepts. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpProxy() + pb_resp = compute.TargetHttpProxy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionTargetHttpProxiesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionTargetHttpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionTargetHttpProxyRequest): + The request object. A request message for + RegionTargetHttpProxies.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpProxies', + 'body': 'target_http_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionTargetHttpProxiesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionTargetHttpProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpProxyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionTargetHttpProxiesRequest): + The request object. A request message for + RegionTargetHttpProxies.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpProxyList: + A list of TargetHttpProxy resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpProxies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionTargetHttpProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpProxyList() + pb_resp = compute.TargetHttpProxyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetUrlMap(RegionTargetHttpProxiesRestStub): + def __hash__(self): + return hash("SetUrlMap") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetUrlMapRegionTargetHttpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set url map method over HTTP. + + Args: + request (~.compute.SetUrlMapRegionTargetHttpProxyRequest): + The request object. A request message for + RegionTargetHttpProxies.SetUrlMap. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}/setUrlMap', + 'body': 'url_map_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_url_map(request, metadata) + pb_request = compute.SetUrlMapRegionTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_url_map(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionTargetHttpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionTargetHttpProxyRequest], + compute.TargetHttpProxy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionTargetHttpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionTargetHttpProxiesRequest], + compute.TargetHttpProxyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_url_map(self) -> Callable[ + [compute.SetUrlMapRegionTargetHttpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetUrlMap(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionTargetHttpProxiesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/__init__.py new file mode 100644 index 000000000..fde0d2cc4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionTargetHttpsProxiesClient + +__all__ = ( + 'RegionTargetHttpsProxiesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/client.py new file mode 100644 index 000000000..24c11300d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/client.py @@ -0,0 +1,2066 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_target_https_proxies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionTargetHttpsProxiesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionTargetHttpsProxiesRestTransport + + +class RegionTargetHttpsProxiesClientMeta(type): + """Metaclass for the RegionTargetHttpsProxies client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[RegionTargetHttpsProxiesTransport]]
+    _transport_registry["rest"] = RegionTargetHttpsProxiesRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[RegionTargetHttpsProxiesTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class RegionTargetHttpsProxiesClient(metaclass=RegionTargetHttpsProxiesClientMeta):
+    """The RegionTargetHttpsProxies API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionTargetHttpsProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionTargetHttpsProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionTargetHttpsProxiesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionTargetHttpsProxiesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionTargetHttpsProxiesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region target https proxies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionTargetHttpsProxiesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionTargetHttpsProxiesTransport): + # transport is a RegionTargetHttpsProxiesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_https_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified TargetHttpsProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionTargetHttpsProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpsProxies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to delete. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_https_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionTargetHttpsProxyRequest): + request = compute.DeleteRegionTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
        return response

    def delete(self,
            request: Optional[Union[compute.DeleteRegionTargetHttpsProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_https_proxy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified TargetHttpsProxy resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.RegionTargetHttpsProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteRegionTargetHttpsProxyRequest(
                    project="project_value",
                    region="region_value",
                    target_https_proxy="target_https_proxy_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteRegionTargetHttpsProxyRequest, dict]):
                The request object. A request message for
                RegionTargetHttpsProxies.Delete. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy (str):
                Name of the TargetHttpsProxy resource
                to delete.

                This corresponds to the ``target_https_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_https_proxy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteRegionTargetHttpsProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteRegionTargetHttpsProxyRequest):
            request = compute.DeleteRegionTargetHttpsProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if target_https_proxy is not None:
                request.target_https_proxy = target_https_proxy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("target_https_proxy", request.target_https_proxy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the returned Operation so callers can poll it to completion
        # through the region operations client as an ExtendedOperation.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def get(self,
            request: Optional[Union[compute.GetRegionTargetHttpsProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_https_proxy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.TargetHttpsProxy:
        r"""Returns the specified TargetHttpsProxy resource in
        the specified region.

        ..
        code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.RegionTargetHttpsProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.GetRegionTargetHttpsProxyRequest(
                    project="project_value",
                    region="region_value",
                    target_https_proxy="target_https_proxy_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetRegionTargetHttpsProxyRequest, dict]):
                The request object. A request message for
                RegionTargetHttpsProxies.Get. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy (str):
                Name of the TargetHttpsProxy resource
                to return.

                This corresponds to the ``target_https_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.TargetHttpsProxy:
                Represents a Target HTTPS Proxy resource. Google Compute
                Engine has two Target HTTPS Proxy resources: \*
                [Global](/compute/docs/reference/rest/v1/targetHttpsProxies)
                \*
                [Regional](/compute/docs/reference/rest/v1/regionTargetHttpsProxies)
                A target HTTPS proxy is a component of GCP HTTPS load
                balancers. \* targetHttpsProxies are used by external
                HTTPS load balancers. \* regionTargetHttpsProxies are
                used by internal HTTPS load balancers. Forwarding rules
                reference a target HTTPS proxy, and the target proxy
                then references a URL map. For more information, read
                Using Target Proxies and Forwarding rule concepts.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_https_proxy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetRegionTargetHttpsProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetRegionTargetHttpsProxyRequest):
            request = compute.GetRegionTargetHttpsProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if target_https_proxy is not None:
                request.target_https_proxy = target_https_proxy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("target_https_proxy", request.target_https_proxy),
            )),
        )

        # Send the request. `get` is a plain unary read: the resource is
        # returned directly, with no operation polling involved.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert_unary(self,
            request: Optional[Union[compute.InsertRegionTargetHttpsProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_https_proxy_resource: Optional[compute.TargetHttpsProxy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates a TargetHttpsProxy resource in the specified
        project and region using the data included in the
        request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.RegionTargetHttpsProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertRegionTargetHttpsProxyRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionTargetHttpsProxyRequest, dict]):
                The request object. A request message for
                RegionTargetHttpsProxies.Insert. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy):
                The body resource for this request
                This corresponds to the ``target_https_proxy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_https_proxy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertRegionTargetHttpsProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertRegionTargetHttpsProxyRequest):
            request = compute.InsertRegionTargetHttpsProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if target_https_proxy_resource is not None:
                request.target_https_proxy_resource = target_https_proxy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        # NOTE: the *_unary variant returns the raw compute.Operation without
        # polling; use insert() for an ExtendedOperation that can be polled.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert(self,
            request: Optional[Union[compute.InsertRegionTargetHttpsProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_https_proxy_resource: Optional[compute.TargetHttpsProxy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Creates a TargetHttpsProxy resource in the specified
        project and region using the data included in the
        request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.RegionTargetHttpsProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertRegionTargetHttpsProxyRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionTargetHttpsProxyRequest, dict]):
                The request object. A request message for
                RegionTargetHttpsProxies.Insert. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy):
                The body resource for this request
                This corresponds to the ``target_https_proxy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_https_proxy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertRegionTargetHttpsProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertRegionTargetHttpsProxyRequest):
            request = compute.InsertRegionTargetHttpsProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if target_https_proxy_resource is not None:
                request.target_https_proxy_resource = target_https_proxy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the returned Operation so callers can poll it to completion
        # through the region operations client as an ExtendedOperation.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def list(self,
            request: Optional[Union[compute.ListRegionTargetHttpsProxiesRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListPager:
        r"""Retrieves the list of TargetHttpsProxy resources
        available to the specified project in the specified
        region.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_list():
                # Create a client
                client = compute_v1.RegionTargetHttpsProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.ListRegionTargetHttpsProxiesRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                page_result = client.list(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.ListRegionTargetHttpsProxiesRequest, dict]):
                The request object. A request message for
                RegionTargetHttpsProxies.List. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.services.region_target_https_proxies.pagers.ListPager:
                Contains a list of TargetHttpsProxy
                resources.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.ListRegionTargetHttpsProxiesRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.ListRegionTargetHttpsProxiesRequest):
            request = compute.ListRegionTargetHttpsProxiesRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def patch_unary(self,
            request: Optional[Union[compute.PatchRegionTargetHttpsProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_https_proxy: Optional[str] = None,
            target_https_proxy_resource: Optional[compute.TargetHttpsProxy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Patches the specified regional TargetHttpsProxy
        resource with the data included in the request. This
        method supports PATCH semantics and uses JSON merge
        patch format and processing rules.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.RegionTargetHttpsProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.PatchRegionTargetHttpsProxyRequest(
                    project="project_value",
                    region="region_value",
                    target_https_proxy="target_https_proxy_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchRegionTargetHttpsProxyRequest, dict]):
                The request object. A request message for
                RegionTargetHttpsProxies.Patch. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region for this request.
                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy (str):
                Name of the TargetHttpsProxy resource
                to patch.

                This corresponds to the ``target_https_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy):
                The body resource for this request
                This corresponds to the ``target_https_proxy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_https_proxy, target_https_proxy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchRegionTargetHttpsProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchRegionTargetHttpsProxyRequest):
            request = compute.PatchRegionTargetHttpsProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if target_https_proxy is not None:
                request.target_https_proxy = target_https_proxy
            if target_https_proxy_resource is not None:
                request.target_https_proxy_resource = target_https_proxy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("target_https_proxy", request.target_https_proxy),
            )),
        )

        # Send the request.
        # NOTE: the *_unary variant returns the raw compute.Operation without
        # polling; use patch() for an ExtendedOperation that can be polled.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def patch(self,
            request: Optional[Union[compute.PatchRegionTargetHttpsProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_https_proxy: Optional[str] = None,
            target_https_proxy_resource: Optional[compute.TargetHttpsProxy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Patches the specified regional TargetHttpsProxy
        resource with the data included in the request. This
        method supports PATCH semantics and uses JSON merge
        patch format and processing rules.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_patch():
                # Create a client
                client = compute_v1.RegionTargetHttpsProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.PatchRegionTargetHttpsProxyRequest(
                    project="project_value",
                    region="region_value",
                    target_https_proxy="target_https_proxy_value",
                )

                # Make the request
                response = client.patch(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchRegionTargetHttpsProxyRequest, dict]):
                The request object. A request message for
                RegionTargetHttpsProxies.Patch. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region for this request.
                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy (str):
                Name of the TargetHttpsProxy resource
                to patch.

                This corresponds to the ``target_https_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy):
                The body resource for this request
                This corresponds to the ``target_https_proxy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_https_proxy, target_https_proxy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchRegionTargetHttpsProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchRegionTargetHttpsProxyRequest):
            request = compute.PatchRegionTargetHttpsProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region
            if target_https_proxy is not None:
                request.target_https_proxy = target_https_proxy
            if target_https_proxy_resource is not None:
                request.target_https_proxy_resource = target_https_proxy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("target_https_proxy", request.target_https_proxy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the returned Operation so callers can poll it to completion
        # through the region operations client as an ExtendedOperation.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def set_ssl_certificates_unary(self,
            request: Optional[Union[compute.SetSslCertificatesRegionTargetHttpsProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_https_proxy: Optional[str] = None,
            region_target_https_proxies_set_ssl_certificates_request_resource: Optional[compute.RegionTargetHttpsProxiesSetSslCertificatesRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Replaces SslCertificates for TargetHttpsProxy.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_set_ssl_certificates():
                # Create a client
                client = compute_v1.RegionTargetHttpsProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.SetSslCertificatesRegionTargetHttpsProxyRequest(
                    project="project_value",
                    region="region_value",
                    target_https_proxy="target_https_proxy_value",
                )

                # Make the request
                response = client.set_ssl_certificates(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.SetSslCertificatesRegionTargetHttpsProxyRequest, dict]):
                The request object. A request message for
                RegionTargetHttpsProxies.SetSslCertificates.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_https_proxy (str):
                Name of the TargetHttpsProxy resource
                to set an SslCertificates resource for.

                This corresponds to the ``target_https_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region_target_https_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.RegionTargetHttpsProxiesSetSslCertificatesRequest):
                The body resource for this request
                This corresponds to the ``region_target_https_proxies_set_ssl_certificates_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_https_proxy, region_target_https_proxies_set_ssl_certificates_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.SetSslCertificatesRegionTargetHttpsProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.SetSslCertificatesRegionTargetHttpsProxyRequest):
            request = compute.SetSslCertificatesRegionTargetHttpsProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if region_target_https_proxies_set_ssl_certificates_request_resource is not None: + request.region_target_https_proxies_set_ssl_certificates_request_resource = region_target_https_proxies_set_ssl_certificates_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_ssl_certificates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_ssl_certificates(self, + request: Optional[Union[compute.SetSslCertificatesRegionTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_https_proxy: Optional[str] = None, + region_target_https_proxies_set_ssl_certificates_request_resource: Optional[compute.RegionTargetHttpsProxiesSetSslCertificatesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Replaces SslCertificates for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_certificates(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslCertificatesRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_ssl_certificates(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslCertificatesRegionTargetHttpsProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpsProxies.SetSslCertificates. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to set an SslCertificates resource for. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_target_https_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.RegionTargetHttpsProxiesSetSslCertificatesRequest): + The body resource for this request + This corresponds to the ``region_target_https_proxies_set_ssl_certificates_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_https_proxy, region_target_https_proxies_set_ssl_certificates_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSslCertificatesRegionTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSslCertificatesRegionTargetHttpsProxyRequest): + request = compute.SetSslCertificatesRegionTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if region_target_https_proxies_set_ssl_certificates_request_resource is not None: + request.region_target_https_proxies_set_ssl_certificates_request_resource = region_target_https_proxies_set_ssl_certificates_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.set_ssl_certificates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_url_map_unary(self, + request: Optional[Union[compute.SetUrlMapRegionTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_https_proxy: Optional[str] = None, + url_map_reference_resource: Optional[compute.UrlMapReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the URL map for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_url_map(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpsProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpsProxies.SetUrlMap. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy to set a + URL map for. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + This corresponds to the ``url_map_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_https_proxy, url_map_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetUrlMapRegionTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetUrlMapRegionTargetHttpsProxyRequest): + request = compute.SetUrlMapRegionTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if url_map_reference_resource is not None: + request.url_map_reference_resource = url_map_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_url_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_url_map(self, + request: Optional[Union[compute.SetUrlMapRegionTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_https_proxy: Optional[str] = None, + url_map_reference_resource: Optional[compute.UrlMapReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the URL map for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_url_map(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpsProxyRequest, dict]): + The request object. A request message for + RegionTargetHttpsProxies.SetUrlMap. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy to set a + URL map for. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + This corresponds to the ``url_map_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_https_proxy, url_map_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetUrlMapRegionTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetUrlMapRegionTargetHttpsProxyRequest): + request = compute.SetUrlMapRegionTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if url_map_reference_resource is not None: + request.url_map_reference_resource = url_map_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_url_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionTargetHttpsProxiesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionTargetHttpsProxiesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/pagers.py new file mode 100644 index 000000000..7806ed390 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetHttpsProxyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetHttpsProxyList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetHttpsProxyList], + request: compute.ListRegionTargetHttpsProxiesRequest, + response: compute.TargetHttpsProxyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionTargetHttpsProxiesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetHttpsProxyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionTargetHttpsProxiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetHttpsProxyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetHttpsProxy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/__init__.py new file mode 100644 index 000000000..c2bd41723 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionTargetHttpsProxiesTransport +from .rest import RegionTargetHttpsProxiesRestTransport +from .rest import RegionTargetHttpsProxiesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionTargetHttpsProxiesTransport]] +_transport_registry['rest'] = RegionTargetHttpsProxiesRestTransport + +__all__ = ( + 'RegionTargetHttpsProxiesTransport', + 'RegionTargetHttpsProxiesRestTransport', + 'RegionTargetHttpsProxiesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/base.py new file mode 100644 index 000000000..7d2fa6b92 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionTargetHttpsProxiesTransport(abc.ABC): + """Abstract transport class for RegionTargetHttpsProxies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_ssl_certificates: gapic_v1.method.wrap_method( + self.set_ssl_certificates, + default_timeout=None, + client_info=client_info, + ), + self.set_url_map: gapic_v1.method.wrap_method( + self.set_url_map, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionTargetHttpsProxyRequest], + Union[ + compute.TargetHttpsProxy, + Awaitable[compute.TargetHttpsProxy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionTargetHttpsProxiesRequest], + Union[ + compute.TargetHttpsProxyList, + Awaitable[compute.TargetHttpsProxyList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_ssl_certificates(self) -> Callable[ + [compute.SetSslCertificatesRegionTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_url_map(self) -> Callable[ + [compute.SetUrlMapRegionTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionTargetHttpsProxiesTransport', +) diff --git 
a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py new file mode 100644 index 000000000..cb120f5c2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py @@ -0,0 +1,1080 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types 
import compute + +from .base import RegionTargetHttpsProxiesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionTargetHttpsProxiesRestInterceptor: + """Interceptor for RegionTargetHttpsProxies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionTargetHttpsProxiesRestTransport. + + .. code-block:: python + class MyCustomRegionTargetHttpsProxiesInterceptor(RegionTargetHttpsProxiesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + 
return response + + def pre_set_ssl_certificates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_ssl_certificates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_url_map(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_url_map(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionTargetHttpsProxiesRestTransport(interceptor=MyCustomRegionTargetHttpsProxiesInterceptor()) + client = RegionTargetHttpsProxiesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpsProxies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpsProxies server. 
+ """ + return request, metadata + + def post_get(self, response: compute.TargetHttpsProxy) -> compute.TargetHttpsProxy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpsProxies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionTargetHttpsProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionTargetHttpsProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpsProxies server. + """ + return request, metadata + + def post_list(self, response: compute.TargetHttpsProxyList) -> compute.TargetHttpsProxyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpsProxies server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchRegionTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpsProxies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_set_ssl_certificates(self, request: compute.SetSslCertificatesRegionTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetSslCertificatesRegionTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_ssl_certificates + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpsProxies server. + """ + return request, metadata + + def post_set_ssl_certificates(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_ssl_certificates + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_set_url_map(self, request: compute.SetUrlMapRegionTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetUrlMapRegionTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_url_map + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetHttpsProxies server. 
+ """ + return request, metadata + + def post_set_url_map(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_url_map + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetHttpsProxies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionTargetHttpsProxiesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionTargetHttpsProxiesRestInterceptor + + +class RegionTargetHttpsProxiesRestTransport(RegionTargetHttpsProxiesTransport): + """REST backend transport for RegionTargetHttpsProxies. + + The RegionTargetHttpsProxies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionTargetHttpsProxiesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionTargetHttpsProxiesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionTargetHttpsProxiesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionTargetHttpsProxyRequest): + The request object. A request message for + RegionTargetHttpsProxies.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionTargetHttpsProxiesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpsProxy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionTargetHttpsProxyRequest): + The request object. A request message for + RegionTargetHttpsProxies.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpsProxy: + Represents a Target HTTPS Proxy resource. Google Compute + Engine has two Target HTTPS Proxy resources: \* + `Global `__ + \* + `Regional `__ + A target HTTPS proxy is a component of GCP HTTPS load + balancers. \* targetHttpsProxies are used by external + HTTPS load balancers. \* regionTargetHttpsProxies are + used by internal HTTPS load balancers. Forwarding rules + reference a target HTTPS proxy, and the target proxy + then references a URL map. For more information, read + Using Target Proxies and Forwarding rule concepts. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpsProxy() + pb_resp = compute.TargetHttpsProxy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionTargetHttpsProxiesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionTargetHttpsProxyRequest): + The request object. A request message for + RegionTargetHttpsProxies.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies', + 'body': 'target_https_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionTargetHttpsProxiesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionTargetHttpsProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpsProxyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionTargetHttpsProxiesRequest): + The request object. A request message for + RegionTargetHttpsProxies.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpsProxyList: + Contains a list of TargetHttpsProxy + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionTargetHttpsProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpsProxyList() + pb_resp = compute.TargetHttpsProxyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RegionTargetHttpsProxiesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionTargetHttpsProxyRequest): + The request object. A request message for + RegionTargetHttpsProxies.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}', + 'body': 'target_https_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetSslCertificates(RegionTargetHttpsProxiesRestStub): + def __hash__(self): + return hash("SetSslCertificates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetSslCertificatesRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set ssl certificates method over HTTP. + + Args: + request (~.compute.SetSslCertificatesRegionTargetHttpsProxyRequest): + The request object. A request message for + RegionTargetHttpsProxies.SetSslCertificates. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setSslCertificates', + 'body': 'region_target_https_proxies_set_ssl_certificates_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_ssl_certificates(request, metadata) + pb_request = compute.SetSslCertificatesRegionTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_ssl_certificates(resp) + return resp + + class _SetUrlMap(RegionTargetHttpsProxiesRestStub): + def __hash__(self): + return hash("SetUrlMap") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetUrlMapRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set url map method over HTTP. + + Args: + request (~.compute.SetUrlMapRegionTargetHttpsProxyRequest): + The request object. A request message for + RegionTargetHttpsProxies.SetUrlMap. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setUrlMap', + 'body': 'url_map_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_url_map(request, metadata) + pb_request = compute.SetUrlMapRegionTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_url_map(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionTargetHttpsProxyRequest], + compute.TargetHttpsProxy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionTargetHttpsProxiesRequest], + compute.TargetHttpsProxyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRegionTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_ssl_certificates(self) -> Callable[ + [compute.SetSslCertificatesRegionTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSslCertificates(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_url_map(self) -> Callable[ + [compute.SetUrlMapRegionTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetUrlMap(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionTargetHttpsProxiesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_tcp_proxies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_tcp_proxies/__init__.py new file mode 100644 index 000000000..eea780b87 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_tcp_proxies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionTargetTcpProxiesClient + +__all__ = ( + 'RegionTargetTcpProxiesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py new file mode 100644 index 000000000..529a09cb7 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py @@ -0,0 +1,1191 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_target_tcp_proxies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionTargetTcpProxiesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionTargetTcpProxiesRestTransport + + +class RegionTargetTcpProxiesClientMeta(type): + """Metaclass for the RegionTargetTcpProxies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionTargetTcpProxiesTransport]] + _transport_registry["rest"] = RegionTargetTcpProxiesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionTargetTcpProxiesTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionTargetTcpProxiesClient(metaclass=RegionTargetTcpProxiesClientMeta): + """The RegionTargetTcpProxies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionTargetTcpProxiesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionTargetTcpProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionTargetTcpProxiesTransport: + """Returns the transport used by the client instance. + + Returns: + RegionTargetTcpProxiesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionTargetTcpProxiesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region target tcp proxies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, RegionTargetTcpProxiesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionTargetTcpProxiesTransport): + # transport is a RegionTargetTcpProxiesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified TargetTcpProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionTargetTcpProxyRequest( + project="project_value", + region="region_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionTargetTcpProxyRequest, dict]): + The request object. A request message for + RegionTargetTcpProxies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + to delete. + + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, target_tcp_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionTargetTcpProxyRequest): + request = compute.DeleteRegionTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified TargetTcpProxy resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionTargetTcpProxyRequest( + project="project_value", + region="region_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionTargetTcpProxyRequest, dict]): + The request object. A request message for + RegionTargetTcpProxies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + to delete. + + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_tcp_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionTargetTcpProxyRequest): + request = compute.DeleteRegionTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetTcpProxy: + r"""Returns the specified TargetTcpProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionTargetTcpProxyRequest( + project="project_value", + region="region_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionTargetTcpProxyRequest, dict]): + The request object. A request message for + RegionTargetTcpProxies.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + to return. + + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetTcpProxy: + Represents a Target TCP Proxy + resource. A target TCP proxy is a + component of a TCP Proxy load balancer. + Global forwarding rules reference target + TCP proxy, and the target proxy then + references an external backend service. 
+ For more information, read TCP Proxy + Load Balancing overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_tcp_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionTargetTcpProxyRequest): + request = compute.GetRegionTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_tcp_proxy_resource: Optional[compute.TargetTcpProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a TargetTcpProxy resource in the specified + project and region using the data included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionTargetTcpProxyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionTargetTcpProxyRequest, dict]): + The request object. A request message for + RegionTargetTcpProxies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ target_tcp_proxy_resource (google.cloud.compute_v1.types.TargetTcpProxy): + The body resource for this request + This corresponds to the ``target_tcp_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_tcp_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionTargetTcpProxyRequest): + request = compute.InsertRegionTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_tcp_proxy_resource is not None: + request.target_tcp_proxy_resource = target_tcp_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_tcp_proxy_resource: Optional[compute.TargetTcpProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a TargetTcpProxy resource in the specified + project and region using the data included in the + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionTargetTcpProxyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionTargetTcpProxyRequest, dict]): + The request object. A request message for + RegionTargetTcpProxies.Insert. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy_resource (google.cloud.compute_v1.types.TargetTcpProxy): + The body resource for this request + This corresponds to the ``target_tcp_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_tcp_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionTargetTcpProxyRequest): + request = compute.InsertRegionTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_tcp_proxy_resource is not None: + request.target_tcp_proxy_resource = target_tcp_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListRegionTargetTcpProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of TargetTcpProxy resources + available to the specified project in a given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionTargetTcpProxiesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionTargetTcpProxiesRequest, dict]): + The request object. A request message for + RegionTargetTcpProxies.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_target_tcp_proxies.pagers.ListPager: + Contains a list of TargetTcpProxy + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionTargetTcpProxiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionTargetTcpProxiesRequest): + request = compute.ListRegionTargetTcpProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. 
    def __enter__(self) -> "RegionTargetTcpProxiesClient":
        # Context-manager entry: no setup needed, the client is usable as-is.
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()


# Default client metadata sent with every request (user-agent version info).
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


# Public API of this module.
__all__ = (
    "RegionTargetTcpProxiesClient",
)
class ListPager:
    """Iterator-style wrapper over paginated ``list`` responses.

    Thinly wraps an initial
    :class:`google.cloud.compute_v1.types.TargetTcpProxyList` and exposes an
    ``__iter__`` over its ``items`` field, transparently issuing further
    ``List`` requests to fetch additional pages as iteration proceeds.

    Attribute access falls through to the most recently fetched response, so
    all the usual :class:`google.cloud.compute_v1.types.TargetTcpProxyList`
    attributes remain available on the pager.
    """
    def __init__(self,
            method: Callable[..., compute.TargetTcpProxyList],
            request: compute.ListRegionTargetTcpProxiesRequest,
            response: compute.TargetTcpProxyList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called and
                which is reused to fetch subsequent pages.
            request (google.cloud.compute_v1.types.ListRegionTargetTcpProxiesRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.TargetTcpProxyList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with each page request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutation never leaks into the
        # caller's object.
        self._request = compute.ListRegionTargetTcpProxiesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response proto.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.TargetTcpProxyList]:
        # Lazily walk the pages; each non-empty next_page_token triggers
        # exactly one additional RPC.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[compute.TargetTcpProxy]:
        # Flatten every page's items into a single stream.
        return (item for page in self.pages for item in page.items)

    def __repr__(self) -> str:
        return f'{type(self).__name__}<{self._response!r}>'
#
from collections import OrderedDict
from typing import Dict, Type

from .base import RegionTargetTcpProxiesTransport
from .rest import RegionTargetTcpProxiesRestTransport
from .rest import RegionTargetTcpProxiesRestInterceptor


# Compile a registry of transports.
# Maps a transport name (as passed to the client's `transport=` argument)
# to the concrete transport class; only REST is available for this service.
_transport_registry = OrderedDict()  # type: Dict[str, Type[RegionTargetTcpProxiesTransport]]
_transport_registry['rest'] = RegionTargetTcpProxiesRestTransport

__all__ = (
    'RegionTargetTcpProxiesTransport',
    'RegionTargetTcpProxiesRestTransport',
    'RegionTargetTcpProxiesRestInterceptor',
)
#
import abc
# FIX: `Any` is used below (`Dict[str, Any]`) but was missing from this
# import. Harmless at runtime (local variable annotations are unevaluated)
# but it breaks static type checkers and typing.get_type_hints().
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute
from google.cloud.compute_v1.services import region_operations

# Default client metadata sent with every request (user-agent version info).
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class RegionTargetTcpProxiesTransport(abc.ABC):
    """Abstract transport class for RegionTargetTcpProxies."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The audience applied to the
                credentials; defaults to ``host`` when unset.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are given.
        """
        # Lazily-built cache of helper clients used for extended-operation
        # polling (see _region_operations_client below).
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods (adds retry/timeout defaults and
        # user-agent metadata to each RPC).
        self._wrapped_methods = {
            self.delete: gapic_v1.method.wrap_method(
                self.delete,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.insert: gapic_v1.method.wrap_method(
                self.insert,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def delete(self) -> Callable[
            [compute.DeleteRegionTargetTcpProxyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetRegionTargetTcpProxyRequest],
            Union[
                compute.TargetTcpProxy,
                Awaitable[compute.TargetTcpProxy]
            ]]:
        raise NotImplementedError()

    @property
    def insert(self) -> Callable[
            [compute.InsertRegionTargetTcpProxyRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListRegionTargetTcpProxiesRequest],
            Union[
                compute.TargetTcpProxyList,
                Awaitable[compute.TargetTcpProxyList]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()

    @property
    def _region_operations_client(self) -> region_operations.RegionOperationsClient:
        # Lazily create (and cache) the client used to poll region
        # operations for extended long-running operation support.
        ex_op_service = self._extended_operations_services.get("region_operations")
        if not ex_op_service:
            ex_op_service = region_operations.RegionOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["region_operations"] = ex_op_service

        return ex_op_service


__all__ = (
    'RegionTargetTcpProxiesTransport',
)
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionTargetTcpProxiesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionTargetTcpProxiesRestInterceptor: + """Interceptor for RegionTargetTcpProxies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionTargetTcpProxiesRestTransport. + + .. 
code-block:: python + class MyCustomRegionTargetTcpProxiesInterceptor(RegionTargetTcpProxiesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionTargetTcpProxiesRestTransport(interceptor=MyCustomRegionTargetTcpProxiesInterceptor()) + client = RegionTargetTcpProxiesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionTargetTcpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionTargetTcpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetTcpProxies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetTcpProxies server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetRegionTargetTcpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionTargetTcpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetTcpProxies server. + """ + return request, metadata + + def post_get(self, response: compute.TargetTcpProxy) -> compute.TargetTcpProxy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetTcpProxies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRegionTargetTcpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionTargetTcpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetTcpProxies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetTcpProxies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionTargetTcpProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionTargetTcpProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionTargetTcpProxies server. 
+ """ + return request, metadata + + def post_list(self, response: compute.TargetTcpProxyList) -> compute.TargetTcpProxyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionTargetTcpProxies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionTargetTcpProxiesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionTargetTcpProxiesRestInterceptor + + +class RegionTargetTcpProxiesRestTransport(RegionTargetTcpProxiesTransport): + """REST backend transport for RegionTargetTcpProxies. + + The RegionTargetTcpProxies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionTargetTcpProxiesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionTargetTcpProxiesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionTargetTcpProxiesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionTargetTcpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionTargetTcpProxyRequest): + The request object. A request message for + RegionTargetTcpProxies.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetTcpProxies/{target_tcp_proxy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionTargetTcpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionTargetTcpProxiesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionTargetTcpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetTcpProxy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionTargetTcpProxyRequest): + The request object. A request message for + RegionTargetTcpProxies.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetTcpProxy: + Represents a Target TCP Proxy + resource. A target TCP proxy is a + component of a TCP Proxy load balancer. + Global forwarding rules reference target + TCP proxy, and the target proxy then + references an external backend service. + For more information, read TCP Proxy + Load Balancing overview. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetTcpProxies/{target_tcp_proxy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionTargetTcpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetTcpProxy() + pb_resp = compute.TargetTcpProxy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionTargetTcpProxiesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionTargetTcpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionTargetTcpProxyRequest): + The request object. A request message for + RegionTargetTcpProxies.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetTcpProxies', + 'body': 'target_tcp_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionTargetTcpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionTargetTcpProxiesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionTargetTcpProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetTcpProxyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionTargetTcpProxiesRequest): + The request object. A request message for + RegionTargetTcpProxies.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetTcpProxyList: + Contains a list of TargetTcpProxy + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetTcpProxies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionTargetTcpProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetTcpProxyList() + pb_resp = compute.TargetTcpProxyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionTargetTcpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRegionTargetTcpProxyRequest], + compute.TargetTcpProxy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRegionTargetTcpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionTargetTcpProxiesRequest], + compute.TargetTcpProxyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionTargetTcpProxiesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/__init__.py new file mode 100644 index 000000000..cbefecb66 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionUrlMapsClient + +__all__ = ( + 'RegionUrlMapsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/client.py new file mode 100644 index 000000000..9479e15d7 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/client.py @@ -0,0 +1,1916 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.region_url_maps import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RegionUrlMapsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RegionUrlMapsRestTransport + + +class RegionUrlMapsClientMeta(type): + """Metaclass for the RegionUrlMaps client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionUrlMapsTransport]] + _transport_registry["rest"] = RegionUrlMapsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RegionUrlMapsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionUrlMapsClient(metaclass=RegionUrlMapsClientMeta): + """The RegionUrlMaps API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionUrlMapsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionUrlMapsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionUrlMapsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionUrlMapsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization 
string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RegionUrlMapsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region url maps client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RegionUrlMapsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionUrlMapsTransport): + # transport is a RegionUrlMapsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified UrlMap resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionUrlMapRequest, dict]): + The request object. A request message for + RegionUrlMaps.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to + delete. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, url_map]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionUrlMapRequest): + request = compute.DeleteRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map is not None: + request.url_map = url_map + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified UrlMap resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionUrlMapRequest, dict]): + The request object. A request message for + RegionUrlMaps.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to + delete. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, url_map]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRegionUrlMapRequest): + request = compute.DeleteRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map is not None: + request.url_map = url_map + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.UrlMap: + r"""Returns the specified UrlMap resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionUrlMapRequest, dict]): + The request object. A request message for + RegionUrlMaps.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to + return. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.UrlMap: + Represents a URL Map resource. 
Compute Engine has two + URL Map resources: \* + [Global](/compute/docs/reference/rest/v1/urlMaps) \* + [Regional](/compute/docs/reference/rest/v1/regionUrlMaps) + A URL map resource is a component of certain types of + cloud load balancers and Traffic Director: \* urlMaps + are used by external HTTP(S) load balancers and Traffic + Director. \* regionUrlMaps are used by internal HTTP(S) + load balancers. For a list of supported URL map features + by the load balancer type, see the Load balancing + features: Routing and traffic management table. For a + list of supported URL map features for Traffic Director, + see the Traffic Director features: Routing and traffic + management table. This resource defines mappings from + hostnames and URL paths to either a backend service or a + backend bucket. To use the global urlMaps resource, the + backend service must have a loadBalancingScheme of + either EXTERNAL or INTERNAL_SELF_MANAGED. To use the + regionUrlMaps resource, the backend service must have a + loadBalancingScheme of INTERNAL_MANAGED. For more + information, read URL Map Concepts. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, url_map]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRegionUrlMapRequest): + request = compute.GetRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map is not None: + request.url_map = url_map + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a UrlMap resource in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionUrlMapRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionUrlMapRequest, dict]): + The request object. A request message for + RegionUrlMaps.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + This corresponds to the ``url_map_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionUrlMapRequest): + request = compute.InsertRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a UrlMap resource in the specified project + using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionUrlMapRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionUrlMapRequest, dict]): + The request object. A request message for + RegionUrlMaps.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + This corresponds to the ``url_map_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRegionUrlMapRequest): + request = compute.InsertRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRegionUrlMapsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of UrlMap resources available to + the specified project in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionUrlMapsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionUrlMapsRequest, dict]): + The request object. A request message for + RegionUrlMaps.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.region_url_maps.pagers.ListPager: + Contains a list of UrlMap resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionUrlMapsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionUrlMapsRequest): + request = compute.ListRegionUrlMapsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified UrlMap resource with the data + included in the request. This method supports PATCH + semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionUrlMapRequest, dict]): + The request object. A request message for + RegionUrlMaps.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to patch. + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + This corresponds to the ``url_map_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, url_map, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionUrlMapRequest): + request = compute.PatchRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map is not None: + request.url_map = url_map + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified UrlMap resource with the data + included in the request. This method supports PATCH + semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_patch():
+ # Create a client
+ client = compute_v1.RegionUrlMapsClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.PatchRegionUrlMapRequest(
+ project="project_value",
+ region="region_value",
+ url_map="url_map_value",
+ )
+
+ # Make the request
+ response = client.patch(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.PatchRegionUrlMapRequest, dict]):
+ The request object. A request message for
+ RegionUrlMaps.Patch. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region (str):
+ Name of the region scoping this
+ request.
+
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map (str):
+ Name of the UrlMap resource to patch.
+ This corresponds to the ``url_map`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map_resource (google.cloud.compute_v1.types.UrlMap):
+ The body resource for this request
+ This corresponds to the ``url_map_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, url_map, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRegionUrlMapRequest): + request = compute.PatchRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map is not None: + request.url_map = url_map + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("url_map", request.url_map), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified UrlMap resource with the data + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_update():
+ # Create a client
+ client = compute_v1.RegionUrlMapsClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.UpdateRegionUrlMapRequest(
+ project="project_value",
+ region="region_value",
+ url_map="url_map_value",
+ )
+
+ # Make the request
+ response = client.update(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.UpdateRegionUrlMapRequest, dict]):
+ The request object. A request message for
+ RegionUrlMaps.Update. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region (str):
+ Name of the region scoping this
+ request.
+
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map (str):
+ Name of the UrlMap resource to
+ update.
+
+ This corresponds to the ``url_map`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map_resource (google.cloud.compute_v1.types.UrlMap):
+ The body resource for this request
+ This corresponds to the ``url_map_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.compute_v1.types.Operation:
+ An object representing a
+ long-running operation.
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, url_map, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRegionUrlMapRequest): + request = compute.UpdateRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map is not None: + request.url_map = url_map + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update(self, + request: Optional[Union[compute.UpdateRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified UrlMap resource with the data + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRegionUrlMapRequest, dict]): + The request object. A request message for + RegionUrlMaps.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ url_map (str):
+ Name of the UrlMap resource to
+ update.
+
+ This corresponds to the ``url_map`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map_resource (google.cloud.compute_v1.types.UrlMap):
+ The body resource for this request
+ This corresponds to the ``url_map_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, url_map, url_map_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.UpdateRegionUrlMapRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.UpdateRegionUrlMapRequest):
+ request = compute.UpdateRegionUrlMapRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if region is not None:
+ request.region = region
+ if url_map is not None:
+ request.url_map = url_map
+ if url_map_resource is not None:
+ request.url_map_resource = url_map_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def validate(self, + request: Optional[Union[compute.ValidateRegionUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + url_map: Optional[str] = None, + region_url_maps_validate_request_resource: Optional[compute.RegionUrlMapsValidateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.UrlMapsValidateResponse: + r"""Runs static validation for the UrlMap. In particular, + the tests of the provided UrlMap will be run. Calling + this method does NOT create the UrlMap. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_validate(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.ValidateRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.validate(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ValidateRegionUrlMapRequest, dict]): + The request object. A request message for + RegionUrlMaps.Validate. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to be + validated as. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_url_maps_validate_request_resource (google.cloud.compute_v1.types.RegionUrlMapsValidateRequest): + The body resource for this request + This corresponds to the ``region_url_maps_validate_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.UrlMapsValidateResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, url_map, region_url_maps_validate_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ValidateRegionUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ValidateRegionUrlMapRequest): + request = compute.ValidateRegionUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if url_map is not None: + request.url_map = url_map + if region_url_maps_validate_request_resource is not None: + request.region_url_maps_validate_request_resource = region_url_maps_validate_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionUrlMapsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionUrlMapsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/pagers.py new file mode 100644 index 000000000..ccbb750b0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.UrlMapList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.UrlMapList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.UrlMapList], + request: compute.ListRegionUrlMapsRequest, + response: compute.UrlMapList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionUrlMapsRequest): + The initial request object. + response (google.cloud.compute_v1.types.UrlMapList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListRegionUrlMapsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.UrlMapList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.UrlMap]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/__init__.py new file mode 100644 index 000000000..cba8afc6a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionUrlMapsTransport +from .rest import RegionUrlMapsRestTransport +from .rest import RegionUrlMapsRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionUrlMapsTransport]] +_transport_registry['rest'] = RegionUrlMapsRestTransport + +__all__ = ( + 'RegionUrlMapsTransport', + 'RegionUrlMapsRestTransport', + 'RegionUrlMapsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/base.py new file mode 100644 index 000000000..2fa5108f8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RegionUrlMapsTransport(abc.ABC): + """Abstract transport class for RegionUrlMaps.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + self.validate: gapic_v1.method.wrap_method( + self.validate, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRegionUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRegionUrlMapRequest], + Union[ + compute.UrlMap, + Awaitable[compute.UrlMap] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRegionUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRegionUrlMapsRequest], + Union[ + compute.UrlMapList, + Awaitable[compute.UrlMapList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRegionUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateRegionUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def validate(self) -> Callable[ + [compute.ValidateRegionUrlMapRequest], + Union[ + compute.UrlMapsValidateResponse, + Awaitable[compute.UrlMapsValidateResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RegionUrlMapsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/rest.py 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/rest.py new file mode 100644 index 000000000..271c1fe61 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/region_url_maps/transports/rest.py @@ -0,0 +1,1072 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionUrlMapsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + 
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionUrlMapsRestInterceptor: + """Interceptor for RegionUrlMaps. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionUrlMapsRestTransport. + + .. code-block:: python + class MyCustomRegionUrlMapsInterceptor(RegionUrlMapsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_validate(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_validate(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionUrlMapsRestTransport(interceptor=MyCustomRegionUrlMapsInterceptor()) + client = RegionUrlMapsClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRegionUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRegionUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionUrlMaps server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the RegionUrlMaps server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRegionUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionUrlMaps server. + """ + return request, metadata + + def post_get(self, response: compute.UrlMap) -> compute.UrlMap: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the RegionUrlMaps server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertRegionUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRegionUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionUrlMaps server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the RegionUrlMaps server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionUrlMapsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionUrlMapsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionUrlMaps server. + """ + return request, metadata + + def post_list(self, response: compute.UrlMapList) -> compute.UrlMapList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the RegionUrlMaps server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRegionUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRegionUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionUrlMaps server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the RegionUrlMaps server but before + it is returned to user code. 
+ """ + return response + def pre_update(self, request: compute.UpdateRegionUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateRegionUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionUrlMaps server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the RegionUrlMaps server but before + it is returned to user code. + """ + return response + def pre_validate(self, request: compute.ValidateRegionUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ValidateRegionUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for validate + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionUrlMaps server. + """ + return request, metadata + + def post_validate(self, response: compute.UrlMapsValidateResponse) -> compute.UrlMapsValidateResponse: + """Post-rpc interceptor for validate + + Override in a subclass to manipulate the response + after it is returned by the RegionUrlMaps server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RegionUrlMapsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionUrlMapsRestInterceptor + + +class RegionUrlMapsRestTransport(RegionUrlMapsTransport): + """REST backend transport for RegionUrlMaps. + + The RegionUrlMaps API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionUrlMapsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionUrlMapsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(RegionUrlMapsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRegionUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionUrlMapRequest): + The request object. 
A request message for + RegionUrlMaps.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRegionUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of 
error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RegionUrlMapsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.UrlMap: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionUrlMapRequest): + The request object. A request message for + RegionUrlMaps.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.UrlMap: + Represents a URL Map resource. Compute Engine has two + URL Map resources: \* + `Global `__ \* + `Regional `__ + A URL map resource is a component of certain types of + cloud load balancers and Traffic Director: \* urlMaps + are used by external HTTP(S) load balancers and Traffic + Director. \* regionUrlMaps are used by internal HTTP(S) + load balancers. For a list of supported URL map features + by the load balancer type, see the Load balancing + features: Routing and traffic management table. 
For a + list of supported URL map features for Traffic Director, + see the Traffic Director features: Routing and traffic + management table. This resource defines mappings from + hostnames and URL paths to either a backend service or a + backend bucket. To use the global urlMaps resource, the + backend service must have a loadBalancingScheme of + either EXTERNAL or INTERNAL_SELF_MANAGED. To use the + regionUrlMaps resource, the backend service must have a + loadBalancingScheme of INTERNAL_MANAGED. For more + information, read URL Map Concepts. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.UrlMap() + pb_resp = compute.UrlMap.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RegionUrlMapsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRegionUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionUrlMapRequest): + The request object. A request message for + RegionUrlMaps.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/urlMaps', + 'body': 'url_map_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRegionUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RegionUrlMapsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionUrlMapsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.UrlMapList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionUrlMapsRequest): + The request object. A request message for + RegionUrlMaps.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.UrlMapList: + Contains a list of UrlMap resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/urlMaps', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionUrlMapsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.UrlMapList() + pb_resp = compute.UrlMapList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RegionUrlMapsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRegionUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. 
+ + Args: + request (~.compute.PatchRegionUrlMapRequest): + The request object. A request message for + RegionUrlMaps.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}', + 'body': 'url_map_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRegionUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _Update(RegionUrlMapsRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateRegionUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateRegionUrlMapRequest): + The request object. A request message for + RegionUrlMaps.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}', + 'body': 'url_map_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateRegionUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
    class _Validate(RegionUrlMapsRestStub):
        """REST stub for the RegionUrlMaps.Validate RPC.

        Serializes a ``ValidateRegionUrlMapRequest`` into an HTTP POST
        against the Compute REST surface, sends it over the transport's
        authorized session, and parses the HTTP response back into a
        ``UrlMapsValidateResponse``.
        """

        def __hash__(self):
            # Stubs are hashed by RPC name so the transport can cache one
            # instance per method.
            return hash("Validate")

        # Required query parameters that carry their proto default value
        # and would otherwise be dropped from the JSON; Validate has none.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Re-add any required query params missing from message_dict
            # because they held their proto default value.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.ValidateRegionUrlMapRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.UrlMapsValidateResponse:
            r"""Call the validate method over HTTP.

            Args:
                request (~.compute.ValidateRegionUrlMapRequest):
                    The request object. A request message for
                    RegionUrlMaps.Validate. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.UrlMapsValidateResponse:
                    The response message parsed from the HTTP body.
            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}/validate',
                'body': 'region_url_maps_validate_request_resource',
            },
            ]
            # Give the user-supplied interceptor a chance to rewrite the
            # request and metadata before serialization.
            request, metadata = self._interceptor.pre_validate(request, metadata)
            pb_request = compute.ValidateRegionUrlMapRequest.pb(request)
            # Split proto fields into URI template vars, body and query string.
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.UrlMapsValidateResponse()
            pb_resp = compute.UrlMapsValidateResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            # Let the interceptor post-process (or replace) the response.
            resp = self._interceptor.post_validate(resp)
            return resp
    # Typed accessors for the per-RPC callables. Each property builds a
    # fresh stub bound to this transport's session, host and interceptor;
    # callers invoke e.g. ``transport.get(request)``.
    @property
    def get(self) -> Callable[
            [compute.GetRegionUrlMapRequest],
            compute.UrlMap]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor) # type: ignore

    @property
    def insert(self) -> Callable[
            [compute.InsertRegionUrlMapRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Insert(self._session, self._host, self._interceptor) # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListRegionUrlMapsRequest],
            compute.UrlMapList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor) # type: ignore

    @property
    def patch(self) -> Callable[
            [compute.PatchRegionUrlMapRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Patch(self._session, self._host, self._interceptor) # type: ignore
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate(self) -> Callable[ + [compute.ValidateRegionUrlMapRequest], + compute.UrlMapsValidateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Validate(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionUrlMapsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/__init__.py new file mode 100644 index 000000000..ea41ccb24 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import RegionsClient + +__all__ = ( + 'RegionsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/client.py new file mode 100644 index 000000000..9c7cac430 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/client.py @@ -0,0 +1,631 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from 
class RegionsClientMeta(type):
    """Metaclass for the Regions client.

    Keeps transport selection at the class level so that client
    *instances* stay free of transport-registry bookkeeping.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[RegionsTransport]]
    _transport_registry["rest"] = RegionsRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[RegionsTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        registry = cls._transport_registry
        if not label:
            # No explicit choice: fall back to the first registered
            # transport (registration order is preserved by OrderedDict).
            return next(iter(registry.values()))
        # A specific transport was requested; look it up by name.
        return registry[label]
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Both env vars are validated eagerly so a typo fails fast instead
        # of silently selecting the wrong endpoint.
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        # Explicit client_options cert takes precedence over the machine's
        # default cert source.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        # An explicit api_endpoint always wins over the env-var policy.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RegionsTransport): + # transport is a RegionsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
    def get(self,
            request: Optional[Union[compute.GetRegionRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Region:
        r"""Returns the specified Region resource. To decrease latency for
        this method, you can optionally omit any unneeded information
        from the response by using a field mask. This practice is
        especially recommended for unused quota information (the
        ``quotas`` field). To exclude one or more fields, set your
        request's ``fields`` query parameter to only include the fields
        you need. For example, to only include the ``id`` and
        ``selfLink`` fields, add the query parameter
        ``?fields=id,selfLink`` to your request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.RegionsClient()

                # Initialize request argument(s)
                request = compute_v1.GetRegionRequest(
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetRegionRequest, dict]):
                The request object. A request message for Regions.Get.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region resource to
                return.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Region:
                Represents a Region resource. A
                region is a geographical area where a
                resource is located. For more
                information, read Regions and Zones.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetRegionRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetRegionRequest):
            request = compute.GetRegionRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if region is not None:
                request.region = region

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # (project/region are implicit routing parameters for this RPC.)
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
To + exclude one or more fields, set your request's ``fields`` query + parameter to only include the fields you need. For example, to + only include the ``id`` and ``selfLink`` fields, add the query + parameter ``?fields=id,selfLink`` to your request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionsRequest, dict]): + The request object. A request message for Regions.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.regions.pagers.ListPager: + Contains a list of region resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRegionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRegionsRequest): + request = compute.ListRegionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RegionsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/pagers.py new file mode 100644 index 000000000..d01f52fb6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RegionList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RegionList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.RegionList], + request: compute.ListRegionsRequest, + response: compute.RegionList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionsRequest): + The initial request object. + response (google.cloud.compute_v1.types.RegionList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRegionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RegionList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Region]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/__init__.py new file mode 100644 index 000000000..9f31db87e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionsTransport +from .rest import RegionsRestTransport +from .rest import RegionsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RegionsTransport]] +_transport_registry['rest'] = RegionsRestTransport + +__all__ = ( + 'RegionsTransport', + 'RegionsRestTransport', + 'RegionsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/base.py new file mode 100644 index 000000000..48c4561c3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/base.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute

# Default user-agent info: carries this package's version on every request.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class RegionsTransport(abc.ABC):
    """Abstract transport class for Regions.

    Concrete subclasses (e.g. the REST transport) implement the ``get``,
    ``list``, ``close`` and ``kind`` members; this base class owns the
    shared credential-resolution logic.
    """

    # OAuth scopes requested when falling back to default credentials.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute.readonly',
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'
    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are given.
        """
        # ``scopes=None`` lets google-auth fall back to default_scopes.
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults. Resolution order: explicit credentials object, then
        # credentials_file, then Application Default Credentials.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            # (This branch only runs for ADC-resolved credentials.)
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        # The hasattr guard keeps compatibility with older google-auth releases.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods: each RPC gets retry/timeout/user-agent
        # handling applied once here rather than on every call.
        self._wrapped_methods = {
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetRegionRequest],
            Union[
                compute.Region,
                Awaitable[compute.Region]
            ]]:
        # Abstract hook: concrete transports return the callable for Regions.Get.
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListRegionsRequest],
            Union[
                compute.RegionList,
                Awaitable[compute.RegionList]
            ]]:
        # Abstract hook: concrete transports return the callable for Regions.List.
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short identifier for the transport flavor, e.g. "rest".
        raise NotImplementedError()


__all__ = (
    'RegionsTransport',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/rest.py
new file mode 100644
index 000000000..d64ac0258
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/regions/transports/rest.py
@@ -0,0 +1,407 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RegionsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RegionsRestInterceptor: + """Interceptor for Regions. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionsRestTransport. + + .. 
code-block:: python + class MyCustomRegionsInterceptor(RegionsRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionsRestTransport(interceptor=MyCustomRegionsInterceptor()) + client = RegionsClient(transport=transport) + + + """ + def pre_get(self, request: compute.GetRegionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRegionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Regions server. + """ + return request, metadata + + def post_get(self, response: compute.Region) -> compute.Region: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Regions server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRegionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRegionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Regions server. + """ + return request, metadata + + def post_list(self, response: compute.RegionList) -> compute.RegionList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Regions server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class RegionsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionsRestInterceptor + + +class RegionsRestTransport(RegionsTransport): + """REST backend transport for Regions. + + The Regions API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RegionsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Get(RegionsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRegionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Region: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionRequest): + The request object. A request message for Regions.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Region: + Represents a Region resource. A + region is a geographical area where a + resource is located. 
For more + information, read Regions and Zones. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRegionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Region() + pb_resp = compute.Region.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(RegionsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRegionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RegionList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionsRequest): + The request object. A request message for Regions.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.RegionList: + Contains a list of region resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRegionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionList() + pb_resp = compute.RegionList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def get(self) -> Callable[ + [compute.GetRegionRequest], + compute.Region]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRegionsRequest], + compute.RegionList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RegionsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/__init__.py new file mode 100644 index 000000000..f8ebdcdf1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ReservationsClient + +__all__ = ( + 'ReservationsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/client.py new file mode 100644 index 000000000..cc158426b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/client.py @@ -0,0 +1,2312 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.reservations import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import ReservationsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ReservationsRestTransport + + +class ReservationsClientMeta(type): + """Metaclass for the Reservations client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ReservationsTransport]] + _transport_registry["rest"] = ReservationsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ReservationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ReservationsClient(metaclass=ReservationsClientMeta): + """The Reservations API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ReservationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ReservationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ReservationsTransport: + """Returns the transport used by the client instance. + + Returns: + ReservationsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ReservationsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the reservations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ReservationsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ReservationsTransport): + # transport is a ReservationsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListReservationsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of reservations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListReservationsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListReservationsRequest, dict]): + The request object. A request message for + Reservations.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.reservations.pagers.AggregatedListPager: + Contains a list of reservations. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListReservationsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListReservationsRequest): + request = compute.AggregatedListReservationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteReservationRequest( + project="project_value", + reservation="reservation_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteReservationRequest, dict]): + The request object. A request message for + Reservations.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (str): + Name of the reservation to delete. + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, reservation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteReservationRequest): + request = compute.DeleteReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation is not None: + request.reservation = reservation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("reservation", request.reservation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteReservationRequest( + project="project_value", + reservation="reservation_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteReservationRequest, dict]): + The request object. A request message for + Reservations.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (str): + Name of the reservation to delete. + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, reservation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteReservationRequest): + request = compute.DeleteReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation is not None: + request.reservation = reservation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("reservation", request.reservation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Reservation: + r"""Retrieves information about the specified + reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.GetReservationRequest( + project="project_value", + reservation="reservation_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetReservationRequest, dict]): + The request object. A request message for + Reservations.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (str): + Name of the reservation to retrieve. + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Reservation: + Represents a reservation resource. A + reservation ensures that capacity is + held in a specific zone even if the + reserved VMs are not running. For more + information, read Reserving zonal + resources. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, reservation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetReservationRequest): + request = compute.GetReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation is not None: + request.reservation = reservation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("reservation", request.reservation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. 
May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyReservationRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyReservationRequest, dict]): + The request object. A request message for + Reservations.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: 
BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetIamPolicyReservationRequest): + request = compute.GetIamPolicyReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation_resource: Optional[compute.Reservation] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new reservation. For more information, read + Reserving zonal resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.InsertReservationRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertReservationRequest, dict]): + The request object. A request message for + Reservations.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ reservation_resource (google.cloud.compute_v1.types.Reservation): + The body resource for this request + This corresponds to the ``reservation_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, reservation_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertReservationRequest): + request = compute.InsertReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation_resource is not None: + request.reservation_resource = reservation_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation_resource: Optional[compute.Reservation] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new reservation. For more information, read + Reserving zonal resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.InsertReservationRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertReservationRequest, dict]): + The request object. A request message for + Reservations.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation_resource (google.cloud.compute_v1.types.Reservation): + The body resource for this request + This corresponds to the ``reservation_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, reservation_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertReservationRequest): + request = compute.InsertReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation_resource is not None: + request.reservation_resource = reservation_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListReservationsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""A list of all the reservations that have been + configured for the specified project in specified zone. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.ListReservationsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListReservationsRequest, dict]): + The request object. A request message for + Reservations.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.reservations.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListReservationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListReservationsRequest): + request = compute.ListReservationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def resize_unary(self, + request: Optional[Union[compute.ResizeReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation: Optional[str] = None, + reservations_resize_request_resource: Optional[compute.ReservationsResizeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Resizes the reservation (applicable to standalone + reservations only). For more information, read Modifying + reservations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.ResizeReservationRequest( + project="project_value", + reservation="reservation_value", + zone="zone_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeReservationRequest, dict]): + The request object. A request message for + Reservations.Resize. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. 
+ This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (str): + Name of the reservation to update. + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservations_resize_request_resource (google.cloud.compute_v1.types.ReservationsResizeRequest): + The body resource for this request + This corresponds to the ``reservations_resize_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, reservation, reservations_resize_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeReservationRequest): + request = compute.ResizeReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation is not None: + request.reservation = reservation + if reservations_resize_request_resource is not None: + request.reservations_resize_request_resource = reservations_resize_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("reservation", request.reservation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resize(self, + request: Optional[Union[compute.ResizeReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation: Optional[str] = None, + reservations_resize_request_resource: Optional[compute.ReservationsResizeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Resizes the reservation (applicable to standalone + reservations only). For more information, read Modifying + reservations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_resize(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.ResizeReservationRequest( + project="project_value", + reservation="reservation_value", + zone="zone_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ResizeReservationRequest, dict]): + The request object. A request message for + Reservations.Resize. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (str): + Name of the reservation to update. + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservations_resize_request_resource (google.cloud.compute_v1.types.ReservationsResizeRequest): + The body resource for this request + This corresponds to the ``reservations_resize_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, reservation, reservations_resize_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ResizeReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ResizeReservationRequest): + request = compute.ResizeReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation is not None: + request.reservation = reservation + if reservations_resize_request_resource is not None: + request.reservations_resize_request_resource = reservations_resize_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("reservation", request.reservation), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + zone_set_policy_request_resource: Optional[compute.ZoneSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyReservationRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyReservationRequest, dict]): + The request object. A request message for + Reservations.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + This corresponds to the ``zone_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: 
BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, zone_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicyReservationRequest): + request = compute.SetIamPolicyReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if zone_set_policy_request_resource is not None: + request.zone_set_policy_request_resource = zone_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsReservationRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsReservationRequest, dict]): + The request object. A request message for + Reservations.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsReservationRequest): + request = compute.TestIamPermissionsReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation: Optional[str] = None, + reservation_resource: Optional[compute.Reservation] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Update share settings of the reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateReservationRequest( + project="project_value", + reservation="reservation_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateReservationRequest, dict]): + The request object. A request message for + Reservations.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (str): + Name of the reservation to update. + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation_resource (google.cloud.compute_v1.types.Reservation): + The body resource for this request + This corresponds to the ``reservation_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, reservation, reservation_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateReservationRequest): + request = compute.UpdateReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation is not None: + request.reservation = reservation + if reservation_resource is not None: + request.reservation_resource = reservation_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("reservation", request.reservation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update(self, + request: Optional[Union[compute.UpdateReservationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + reservation: Optional[str] = None, + reservation_resource: Optional[compute.Reservation] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Update share settings of the reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.ReservationsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateReservationRequest( + project="project_value", + reservation="reservation_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateReservationRequest, dict]): + The request object. A request message for + Reservations.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (str): + Name of the reservation to update. 
+ This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation_resource (google.cloud.compute_v1.types.Reservation): + The body resource for this request + This corresponds to the ``reservation_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, reservation, reservation_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateReservationRequest): + request = compute.UpdateReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if reservation is not None: + request.reservation = reservation + if reservation_resource is not None: + request.reservation_resource = reservation_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("reservation", request.reservation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "ReservationsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ReservationsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/pagers.py new file mode 100644 index 000000000..88a0d28bc --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ReservationAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ReservationAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.ReservationAggregatedList], + request: compute.AggregatedListReservationsRequest, + response: compute.ReservationAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListReservationsRequest): + The initial request object. + response (google.cloud.compute_v1.types.ReservationAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListReservationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ReservationAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.ReservationsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.ReservationsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ReservationList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ReservationList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ReservationList], + request: compute.ListReservationsRequest, + response: compute.ReservationList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListReservationsRequest): + The initial request object. + response (google.cloud.compute_v1.types.ReservationList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListReservationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ReservationList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Reservation]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/transports/__init__.py new file mode 100644 index 000000000..35ac285a5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ReservationsTransport +from .rest import ReservationsRestTransport +from .rest import ReservationsRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ReservationsTransport]] +_transport_registry['rest'] = ReservationsRestTransport + +__all__ = ( + 'ReservationsTransport', + 'ReservationsRestTransport', + 'ReservationsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/transports/base.py new file mode 100644 index 000000000..b8b1150f8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/reservations/transports/base.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import abc
# NOTE: `Any` added here — the original imported only
# Awaitable/Callable/Dict/Optional/Sequence/Union but used `Dict[str, Any]`
# below (undefined name under flake8 F821 / evaluated annotations).
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute
from google.cloud.compute_v1.services import zone_operations

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class ReservationsTransport(abc.ABC):
    """Abstract transport class for Reservations.

    Concrete subclasses (e.g. the REST transport) implement the RPC
    properties; this base class handles credential resolution, scope
    handling, and method wrapping shared by all transports.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        # Lazily-built clients for extended-operation polling services;
        # populated on demand by _zone_operations_client below.
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        """Precompute the retry/timeout-wrapped form of each RPC.

        Each property is read exactly once so the dict key and the wrapped
        callable refer to the same object (lookup is by that object).
        """
        def _wrap(rpc):
            return gapic_v1.method.wrap_method(
                rpc,
                default_timeout=None,
                client_info=client_info,
            )

        self._wrapped_methods = {
            rpc: _wrap(rpc)
            for rpc in (
                self.aggregated_list,
                self.delete,
                self.get,
                self.get_iam_policy,
                self.insert,
                self.list,
                self.resize,
                self.set_iam_policy,
                self.test_iam_permissions,
                self.update,
            )
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListReservationsRequest],
            Union[
                compute.ReservationAggregatedList,
                Awaitable[compute.ReservationAggregatedList]
            ]]:
        raise NotImplementedError()

    @property
    def delete(self) -> Callable[
            [compute.DeleteReservationRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetReservationRequest],
            Union[
                compute.Reservation,
                Awaitable[compute.Reservation]
            ]]:
        raise NotImplementedError()

    @property
    def get_iam_policy(self) -> Callable[
            [compute.GetIamPolicyReservationRequest],
            Union[
                compute.Policy,
                Awaitable[compute.Policy]
            ]]:
        raise NotImplementedError()

    @property
    def insert(self) -> Callable[
            [compute.InsertReservationRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListReservationsRequest],
            Union[
                compute.ReservationList,
                Awaitable[compute.ReservationList]
            ]]:
        raise NotImplementedError()

    @property
    def resize(self) -> Callable[
            [compute.ResizeReservationRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def set_iam_policy(self) -> Callable[
            [compute.SetIamPolicyReservationRequest],
            Union[
                compute.Policy,
                Awaitable[compute.Policy]
            ]]:
        raise NotImplementedError()

    @property
    def test_iam_permissions(self) -> Callable[
            [compute.TestIamPermissionsReservationRequest],
            Union[
                compute.TestPermissionsResponse,
                Awaitable[compute.TestPermissionsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def update(self) -> Callable[
            [compute.UpdateReservationRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport name ("rest"); supplied by concrete subclasses.
        raise NotImplementedError()

    @property
    def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient:
        """Client used to poll extended operations; built once and cached."""
        ex_op_service = self._extended_operations_services.get("zone_operations")
        if not ex_op_service:
            ex_op_service = zone_operations.ZoneOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["zone_operations"] = ex_op_service

        return ex_op_service


__all__ = (
    'ReservationsTransport',
)
class ReservationsRestInterceptor:
    """Hook points for customizing Reservations REST calls.

    Subclass this and override any ``pre_*`` / ``post_*`` method to
    observe or rewrite requests, request metadata, and responses —
    for example logging, request validation, or stripping extraneous
    information from responses. Install an instance with::

        transport = ReservationsRestTransport(
            interceptor=MyCustomReservationsInterceptor())
        client = ReservationsClient(transport=transport)

    Every hook is a no-op pass-through by default: each ``pre_*`` hook
    receives ``(request, metadata)`` before the call reaches the
    Reservations server and must return the (possibly modified) pair;
    each ``post_*`` hook receives the server's response before it is
    returned to user code and must return it.
    """

    def pre_aggregated_list(self, request: compute.AggregatedListReservationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListReservationsRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``aggregated_list``."""
        return request, metadata

    def post_aggregated_list(self, response: compute.ReservationAggregatedList) -> compute.ReservationAggregatedList:
        """Post-rpc hook for ``aggregated_list``."""
        return response

    def pre_delete(self, request: compute.DeleteReservationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteReservationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``delete``."""
        return request, metadata

    def post_delete(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc hook for ``delete``."""
        return response

    def pre_get(self, request: compute.GetReservationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetReservationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``get``."""
        return request, metadata

    def post_get(self, response: compute.Reservation) -> compute.Reservation:
        """Post-rpc hook for ``get``."""
        return response

    def pre_get_iam_policy(self, request: compute.GetIamPolicyReservationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyReservationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``get_iam_policy``."""
        return request, metadata

    def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy:
        """Post-rpc hook for ``get_iam_policy``."""
        return response

    def pre_insert(self, request: compute.InsertReservationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertReservationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``insert``."""
        return request, metadata

    def post_insert(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc hook for ``insert``."""
        return response

    def pre_list(self, request: compute.ListReservationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListReservationsRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``list``."""
        return request, metadata

    def post_list(self, response: compute.ReservationList) -> compute.ReservationList:
        """Post-rpc hook for ``list``."""
        return response

    def pre_resize(self, request: compute.ResizeReservationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ResizeReservationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``resize``."""
        return request, metadata

    def post_resize(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc hook for ``resize``."""
        return response

    def pre_set_iam_policy(self, request: compute.SetIamPolicyReservationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyReservationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``set_iam_policy``."""
        return request, metadata

    def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy:
        """Post-rpc hook for ``set_iam_policy``."""
        return response

    def pre_test_iam_permissions(self, request: compute.TestIamPermissionsReservationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsReservationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``test_iam_permissions``."""
        return request, metadata

    def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse:
        """Post-rpc hook for ``test_iam_permissions``."""
        return response

    def pre_update(self, request: compute.UpdateReservationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateReservationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc hook for ``update``."""
        return request, metadata

    def post_update(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc hook for ``update``."""
        return response


@dataclasses.dataclass
class ReservationsRestStub:
    """Per-call plumbing shared by the generated REST method stubs."""
    _session: AuthorizedSession
    _host: str
    _interceptor: ReservationsRestInterceptor
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ReservationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ReservationsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(ReservationsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListReservationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ReservationAggregatedList: + r"""Call the aggregated list method over HTTP. 
+ + Args: + request (~.compute.AggregatedListReservationsRequest): + The request object. A request message for + Reservations.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ReservationAggregatedList: + Contains a list of reservations. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/reservations', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListReservationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ReservationAggregatedList() + pb_resp = compute.ReservationAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(ReservationsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteReservationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteReservationRequest): + The request object. A request message for + Reservations.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(ReservationsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetReservationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Reservation: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetReservationRequest): + The request object. A request message for + Reservations.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Reservation: + Represents a reservation resource. A + reservation ensures that capacity is + held in a specific zone even if the + reserved VMs are not running. For more + information, read Reserving zonal + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Reservation() + pb_resp = compute.Reservation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(ReservationsRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyReservationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyReservationRequest): + The request object. A request message for + Reservations.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(ReservationsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertReservationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertReservationRequest): + The request object. A request message for + Reservations.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations', + 'body': 'reservation_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(ReservationsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListReservationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ReservationList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListReservationsRequest): + The request object. A request message for + Reservations.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.ReservationList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListReservationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ReservationList() + pb_resp = compute.ReservationList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Resize(ReservationsRestStub): + def __hash__(self): + return hash("Resize") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ResizeReservationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the resize method over HTTP. + + Args: + request (~.compute.ResizeReservationRequest): + The request object. A request message for + Reservations.Resize. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}/resize', + 'body': 'reservations_resize_request_resource', + }, + ] + request, metadata = self._interceptor.pre_resize(request, metadata) + pb_request = compute.ResizeReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resize(resp) + return resp + + class _SetIamPolicy(ReservationsRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyReservationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyReservationRequest): + The request object. A request message for + Reservations.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/setIamPolicy', + 'body': 'zone_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(ReservationsRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsReservationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsReservationRequest): + The request object. A request message for + Reservations.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _Update(ReservationsRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateReservationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateReservationRequest): + The request object. A request message for + Reservations.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}', + 'body': 'reservation_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListReservationsRequest], + compute.ReservationAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteReservationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetReservationRequest], + compute.Reservation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyReservationRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertReservationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListReservationsRequest], + compute.ReservationList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def resize(self) -> Callable[ + [compute.ResizeReservationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Resize(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyReservationRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsReservationRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateReservationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ReservationsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/__init__.py new file mode 100644 index 000000000..871d3f3a0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ResourcePoliciesClient + +__all__ = ( + 'ResourcePoliciesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/client.py new file mode 100644 index 000000000..bf15fd9ad --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/client.py @@ -0,0 +1,2032 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.resource_policies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import ResourcePoliciesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ResourcePoliciesRestTransport + + +class ResourcePoliciesClientMeta(type): + """Metaclass for the ResourcePolicies client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ResourcePoliciesTransport]] + _transport_registry["rest"] = ResourcePoliciesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ResourcePoliciesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ResourcePoliciesClient(metaclass=ResourcePoliciesClientMeta): + """The ResourcePolicies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ResourcePoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ResourcePoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ResourcePoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + ResourcePoliciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ResourcePoliciesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the resource policies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ResourcePoliciesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ResourcePoliciesTransport): + # transport is a ResourcePoliciesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListResourcePoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of resource policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListResourcePoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListResourcePoliciesRequest, dict]): + The request object. A request message for + ResourcePolicies.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.resource_policies.pagers.AggregatedListPager: + Contains a list of resourcePolicies. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListResourcePoliciesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListResourcePoliciesRequest): + request = compute.AggregatedListResourcePoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified resource policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteResourcePolicyRequest( + project="project_value", + region="region_value", + resource_policy="resource_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_policy (str): + Name of the resource policy to + delete. + + This corresponds to the ``resource_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, resource_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteResourcePolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteResourcePolicyRequest): + request = compute.DeleteResourcePolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource_policy is not None: + request.resource_policy = resource_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource_policy", request.resource_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified resource policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteResourcePolicyRequest( + project="project_value", + region="region_value", + resource_policy="resource_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_policy (str): + Name of the resource policy to + delete. + + This corresponds to the ``resource_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, resource_policy])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.DeleteResourcePolicyRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.DeleteResourcePolicyRequest):
+ request = compute.DeleteResourcePolicyRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if region is not None:
+ request.region = region
+ if resource_policy is not None:
+ request.resource_policy = resource_policy
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("region", request.region),
+ ("resource_policy", request.resource_policy),
+ )),
+ )
+
+ # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.ResourcePolicy: + r"""Retrieves all information of the specified resource + policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetResourcePolicyRequest( + project="project_value", + region="region_value", + resource_policy="resource_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_policy (str): + Name of the resource policy to + retrieve. + + This corresponds to the ``resource_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.ResourcePolicy: + Represents a Resource Policy + resource. You can use resource policies + to schedule actions for some Compute + Engine resources. For example, you can + use them to schedule persistent disk + snapshots. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetResourcePolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetResourcePolicyRequest): + request = compute.GetResourcePolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource_policy is not None: + request.resource_policy = resource_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource_policy", request.resource_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyResourcePolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyResourcePolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicyResourcePolicyRequest): + request = compute.GetIamPolicyResourcePolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource_policy_resource: Optional[compute.ResourcePolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new resource policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertResourcePolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_policy_resource (google.cloud.compute_v1.types.ResourcePolicy): + The body resource for this request + This corresponds to the ``resource_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, resource_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertResourcePolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertResourcePolicyRequest): + request = compute.InsertResourcePolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource_policy_resource is not None: + request.resource_policy_resource = resource_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource_policy_resource: Optional[compute.ResourcePolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new resource policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertResourcePolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_policy_resource (google.cloud.compute_v1.types.ResourcePolicy): + The body resource for this request + This corresponds to the ``resource_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, resource_policy_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.InsertResourcePolicyRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.InsertResourcePolicyRequest):
+ request = compute.InsertResourcePolicyRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if region is not None:
+ request.region = region
+ if resource_policy_resource is not None:
+ request.resource_policy_resource = resource_policy_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.insert]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("region", request.region),
+ )),
+ )
+
+ # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListResourcePoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""A list all the resource policies that have been + configured for the specified project in specified + region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListResourcePoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListResourcePoliciesRequest, dict]): + The request object. A request message for + ResourcePolicies.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.resource_policies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListResourcePoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListResourcePoliciesRequest): + request = compute.ListResourcePoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource_policy: Optional[str] = None, + resource_policy_resource: Optional[compute.ResourcePolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Modify the specified resource policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchResourcePolicyRequest( + project="project_value", + region="region_value", + resource_policy="resource_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            resource_policy (str):
+                Id of the resource policy to patch.
+                This corresponds to the ``resource_policy`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            resource_policy_resource (google.cloud.compute_v1.types.ResourcePolicy):
+                The body resource for this request
+                This corresponds to the ``resource_policy_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, resource_policy, resource_policy_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.PatchResourcePolicyRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.PatchResourcePolicyRequest):
+            request = compute.PatchResourcePolicyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if resource_policy is not None: + request.resource_policy = resource_policy + if resource_policy_resource is not None: + request.resource_policy_resource = resource_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource_policy", request.resource_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource_policy: Optional[str] = None, + resource_policy_resource: Optional[compute.ResourcePolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Modify the specified resource policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchResourcePolicyRequest( + project="project_value", + region="region_value", + resource_policy="resource_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_policy (str): + Id of the resource policy to patch. + This corresponds to the ``resource_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_policy_resource (google.cloud.compute_v1.types.ResourcePolicy): + The body resource for this request + This corresponds to the ``resource_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, resource_policy, resource_policy_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.PatchResourcePolicyRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.PatchResourcePolicyRequest):
+            request = compute.PatchResourcePolicyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if region is not None:
+            request.region = region
+        if resource_policy is not None:
+            request.resource_policy = resource_policy
+        if resource_policy_resource is not None:
+            request.resource_policy_resource = resource_policy_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.patch]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("region", request.region),
+                ("resource_policy", request.resource_policy),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[compute.RegionSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyResourcePolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: 
BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyResourcePolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicyResourcePolicyRequest): + request = compute.SetIamPolicyResourcePolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = region_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsResourcePolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsResourcePolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsResourcePolicyRequest, dict]): + The request object. A request message for + ResourcePolicies.TestIamPermissions. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsResourcePolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsResourcePolicyRequest): + request = compute.TestIamPermissionsResourcePolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ResourcePoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ResourcePoliciesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/pagers.py new file mode 100644 index 000000000..7a95a0dd0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ResourcePolicyAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ResourcePolicyAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ResourcePolicyAggregatedList], + request: compute.AggregatedListResourcePoliciesRequest, + response: compute.ResourcePolicyAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListResourcePoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.ResourcePolicyAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListResourcePoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ResourcePolicyAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.ResourcePoliciesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.ResourcePoliciesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ResourcePolicyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ResourcePolicyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ResourcePolicyList], + request: compute.ListResourcePoliciesRequest, + response: compute.ResourcePolicyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListResourcePoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.ResourcePolicyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListResourcePoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ResourcePolicyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.ResourcePolicy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/__init__.py new file mode 100644 index 000000000..2bbcfaa0e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ResourcePoliciesTransport +from .rest import ResourcePoliciesRestTransport +from .rest import ResourcePoliciesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ResourcePoliciesTransport]] +_transport_registry['rest'] = ResourcePoliciesRestTransport + +__all__ = ( + 'ResourcePoliciesTransport', + 'ResourcePoliciesRestTransport', + 'ResourcePoliciesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/base.py new file mode 100644 index 000000000..cfd24a655 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ResourcePoliciesTransport(abc.ABC): + """Abstract transport class for ResourcePolicies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListResourcePoliciesRequest], + Union[ + compute.ResourcePolicyAggregatedList, + Awaitable[compute.ResourcePolicyAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteResourcePolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetResourcePolicyRequest], + Union[ + compute.ResourcePolicy, + Awaitable[compute.ResourcePolicy] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyResourcePolicyRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertResourcePolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListResourcePoliciesRequest], + Union[ + compute.ResourcePolicyList, + Awaitable[compute.ResourcePolicyList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchResourcePolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyResourcePolicyRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsResourcePolicyRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> 
region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'ResourcePoliciesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/rest.py new file mode 100644 index 000000000..001766721 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/resource_policies/transports/rest.py @@ -0,0 +1,1344 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ResourcePoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ResourcePoliciesRestInterceptor: + """Interceptor for ResourcePolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ResourcePoliciesRestTransport. + + .. 
code-block:: python + class MyCustomResourcePoliciesInterceptor(ResourcePoliciesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ResourcePoliciesRestTransport(interceptor=MyCustomResourcePoliciesInterceptor()) + client = ResourcePoliciesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListResourcePoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListResourcePoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.ResourcePolicyAggregatedList) -> compute.ResourcePolicyAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteResourcePolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteResourcePolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetResourcePolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetResourcePolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. + """ + return request, metadata + + def post_get(self, response: compute.ResourcePolicy) -> compute.ResourcePolicy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyResourcePolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyResourcePolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertResourcePolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertResourcePolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListResourcePoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListResourcePoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. + """ + return request, metadata + + def post_list(self, response: compute.ResourcePolicyList) -> compute.ResourcePolicyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchResourcePolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchResourcePolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyResourcePolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyResourcePolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsResourcePolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsResourcePolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ResourcePolicies server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the ResourcePolicies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ResourcePoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ResourcePoliciesRestInterceptor + + +class ResourcePoliciesRestTransport(ResourcePoliciesTransport): + """REST backend transport for ResourcePolicies. + + The ResourcePolicies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ResourcePoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ResourcePoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(ResourcePoliciesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListResourcePoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ResourcePolicyAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListResourcePoliciesRequest): + The request object. A request message for + ResourcePolicies.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ResourcePolicyAggregatedList: + Contains a list of resourcePolicies. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/resourcePolicies', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListResourcePoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ResourcePolicyAggregatedList() + pb_resp = compute.ResourcePolicyAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(ResourcePoliciesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteResourcePolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteResourcePolicyRequest): + The request object. A request message for + ResourcePolicies.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteResourcePolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(ResourcePoliciesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetResourcePolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ResourcePolicy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetResourcePolicyRequest): + The request object. A request message for + ResourcePolicies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ResourcePolicy: + Represents a Resource Policy + resource. You can use resource policies + to schedule actions for some Compute + Engine resources. For example, you can + use them to schedule persistent disk + snapshots. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetResourcePolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ResourcePolicy() + pb_resp = compute.ResourcePolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(ResourcePoliciesRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyResourcePolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyResourcePolicyRequest): + The request object. A request message for + ResourcePolicies.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyResourcePolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(ResourcePoliciesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertResourcePolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertResourcePolicyRequest): + The request object. A request message for + ResourcePolicies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/resourcePolicies', + 'body': 'resource_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertResourcePolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(ResourcePoliciesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListResourcePoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ResourcePolicyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListResourcePoliciesRequest): + The request object. A request message for + ResourcePolicies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.ResourcePolicyList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/resourcePolicies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListResourcePoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ResourcePolicyList() + pb_resp = compute.ResourcePolicyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(ResourcePoliciesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchResourcePolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchResourcePolicyRequest): + The request object. A request message for + ResourcePolicies.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}', + 'body': 'resource_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchResourcePolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetIamPolicy(ResourcePoliciesRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyResourcePolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyResourcePolicyRequest): + The request object. A request message for + ResourcePolicies.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/setIamPolicy', + 'body': 'region_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyResourcePolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(ResourcePoliciesRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsResourcePolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsResourcePolicyRequest): + The request object. A request message for + ResourcePolicies.TestIamPermissions. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsResourcePolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListResourcePoliciesRequest], + compute.ResourcePolicyAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteResourcePolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetResourcePolicyRequest], + compute.ResourcePolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyResourcePolicyRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertResourcePolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListResourcePoliciesRequest], + compute.ResourcePolicyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchResourcePolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyResourcePolicyRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsResourcePolicyRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ResourcePoliciesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/__init__.py new file mode 100644 index 000000000..cd38a4e38 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RoutersClient + +__all__ = ( + 'RoutersClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/client.py new file mode 100644 index 000000000..410aa5dcf --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/client.py @@ -0,0 +1,2241 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.routers import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RoutersTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RoutersRestTransport + + +class RoutersClientMeta(type): + """Metaclass for the Routers client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RoutersTransport]] + _transport_registry["rest"] = RoutersRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RoutersTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RoutersClient(metaclass=RoutersClientMeta): + """The Routers API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RoutersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RoutersClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RoutersTransport: + """Returns the transport used by the client instance. + + Returns: + RoutersTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RoutersTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the routers client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RoutersTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RoutersTransport): + # transport is a RoutersTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListRoutersRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of routers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListRoutersRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListRoutersRequest, dict]): + The request object. A request message for + Routers.AggregatedList. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.routers.pagers.AggregatedListPager: + Contains a list of routers. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListRoutersRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListRoutersRequest): + request = compute.AggregatedListRoutersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified Router resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRouterRequest, dict]): + The request object. A request message for Routers.Delete. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to + delete. + + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, router]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRouterRequest): + request = compute.DeleteRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified Router resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRouterRequest, dict]): + The request object. A request message for Routers.Delete. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to + delete. + + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, router]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRouterRequest): + request = compute.DeleteRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Router: + r"""Returns the specified Router resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.GetRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRouterRequest, dict]): + The request object. A request message for Routers.Get. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to + return. + + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Router: + Represents a Cloud Router resource. + For more information about Cloud Router, + read the Cloud Router overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, router]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRouterRequest): + request = compute.GetRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_nat_mapping_info(self, + request: Optional[Union[compute.GetNatMappingInfoRoutersRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetNatMappingInfoPager: + r"""Retrieves runtime Nat mapping information of VM + endpoints. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_nat_mapping_info(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.GetNatMappingInfoRoutersRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + page_result = client.get_nat_mapping_info(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetNatMappingInfoRoutersRequest, dict]): + The request object. 
A request message for + Routers.GetNatMappingInfo. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to query + for Nat Mapping information of VM + endpoints. + + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.routers.pagers.GetNatMappingInfoPager: + Contains a list of + VmEndpointNatMappings. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, router]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetNatMappingInfoRoutersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetNatMappingInfoRoutersRequest): + request = compute.GetNatMappingInfoRoutersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_nat_mapping_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.GetNatMappingInfoPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_router_status(self, + request: Optional[Union[compute.GetRouterStatusRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.RouterStatusResponse: + r"""Retrieves runtime information of the specified + router. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_router_status(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.GetRouterStatusRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.get_router_status(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRouterStatusRouterRequest, dict]): + The request object. A request message for + Routers.GetRouterStatus. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to query. + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.RouterStatusResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, router]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRouterStatusRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRouterStatusRouterRequest): + request = compute.GetRouterStatusRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_router_status] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router_resource: Optional[compute.Router] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a Router resource in the specified project + and region using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.InsertRouterRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRouterRequest, dict]): + The request object. A request message for Routers.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + This corresponds to the ``router_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, router_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRouterRequest): + request = compute.InsertRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router_resource is not None: + request.router_resource = router_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router_resource: Optional[compute.Router] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a Router resource in the specified project + and region using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.InsertRouterRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRouterRequest, dict]): + The request object. A request message for Routers.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + This corresponds to the ``router_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, router_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRouterRequest): + request = compute.InsertRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router_resource is not None: + request.router_resource = router_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRoutersRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of Router resources available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.ListRoutersRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRoutersRequest, dict]): + The request object. A request message for Routers.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.routers.pagers.ListPager: + Contains a list of Router resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRoutersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRoutersRequest): + request = compute.ListRoutersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
    def patch_unary(self,
            request: Optional[Union[compute.PatchRouterRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            router: Optional[str] = None,
            router_resource: Optional[compute.Router] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Patches the specified Router resource with the data
        included in the request. This method supports PATCH
        semantics and uses JSON merge patch format and
        processing rules.

        Unlike :meth:`patch`, this variant returns the raw
        ``compute.Operation`` from the service without wrapping it in an
        ``ExtendedOperation``; the caller is responsible for polling it.

        .. code-block:: python

            from google.cloud import compute_v1

            def sample_patch_unary():
                # Create a client
                client = compute_v1.RoutersClient()

                # Initialize request argument(s)
                request = compute_v1.PatchRouterRequest(
                    project="project_value",
                    region="region_value",
                    router="router_value",
                )

                # Make the request
                response = client.patch_unary(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.PatchRouterRequest, dict]):
                The request object. A request message for Routers.Patch.
                See the method description for details.
            project (str):
                Project ID for this request. This corresponds to the
                ``project`` field on the ``request`` instance; if ``request``
                is provided, this should not be set.
            region (str):
                Name of the region for this request. This corresponds to the
                ``region`` field on the ``request`` instance; if ``request``
                is provided, this should not be set.
            router (str):
                Name of the Router resource to patch. This corresponds to the
                ``router`` field on the ``request`` instance; if ``request``
                is provided, this should not be set.
            router_resource (google.cloud.compute_v1.types.Router):
                The body resource for this request. This corresponds to the
                ``router_resource`` field on the ``request`` instance; if
                ``request`` is provided, this should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The unwrapped long-running operation proto returned by the
                service.

        Raises:
            ValueError: If ``request`` is provided together with any of the
                flattened field arguments.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, router, router_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.PatchRouterRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.PatchRouterRequest):
            request = compute.PatchRouterRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if router is not None:
            request.router = router
        if router_resource is not None:
            request.router_resource = router_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.patch]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("router", request.router),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + router_resource: Optional[compute.Router] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified Router resource with the data + included in the request. This method supports PATCH + semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.PatchRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRouterRequest, dict]): + The request object. A request message for Routers.Patch. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to patch. + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + This corresponds to the ``router_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, router, router_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRouterRequest): + request = compute.PatchRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + if router_resource is not None: + request.router_resource = router_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def preview(self, + request: Optional[Union[compute.PreviewRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + router_resource: Optional[compute.Router] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.RoutersPreviewResponse: + r"""Preview fields auto-generated during router create + and update operations. Calling this method does NOT + create or update the router. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_preview(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.PreviewRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.preview(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PreviewRouterRequest, dict]): + The request object. A request message for + Routers.Preview. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to query. + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + This corresponds to the ``router_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.RoutersPreviewResponse: + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, router, router_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PreviewRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PreviewRouterRequest): + request = compute.PreviewRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + if router_resource is not None: + request.router_resource = router_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.preview] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
    def update_unary(self,
            request: Optional[Union[compute.UpdateRouterRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            router: Optional[str] = None,
            router_resource: Optional[compute.Router] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Updates the specified Router resource with the data
        included in the request. This method conforms to PUT
        semantics, which requests that the state of the target
        resource be created or replaced with the state defined
        by the representation enclosed in the request message
        payload.

        Unlike :meth:`update`, this variant returns the raw
        ``compute.Operation`` from the service without wrapping it in an
        ``ExtendedOperation``; the caller is responsible for polling it.

        .. code-block:: python

            from google.cloud import compute_v1

            def sample_update_unary():
                # Create a client
                client = compute_v1.RoutersClient()

                # Initialize request argument(s)
                request = compute_v1.UpdateRouterRequest(
                    project="project_value",
                    region="region_value",
                    router="router_value",
                )

                # Make the request
                response = client.update_unary(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.UpdateRouterRequest, dict]):
                The request object. A request message for Routers.Update.
                See the method description for details.
            project (str):
                Project ID for this request. This corresponds to the
                ``project`` field on the ``request`` instance; if ``request``
                is provided, this should not be set.
            region (str):
                Name of the region for this request. This corresponds to the
                ``region`` field on the ``request`` instance; if ``request``
                is provided, this should not be set.
            router (str):
                Name of the Router resource to update. This corresponds to
                the ``router`` field on the ``request`` instance; if
                ``request`` is provided, this should not be set.
            router_resource (google.cloud.compute_v1.types.Router):
                The body resource for this request. This corresponds to the
                ``router_resource`` field on the ``request`` instance; if
                ``request`` is provided, this should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The unwrapped long-running operation proto returned by the
                service.

        Raises:
            ValueError: If ``request`` is provided together with any of the
                flattened field arguments.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, router, router_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.UpdateRouterRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.UpdateRouterRequest):
            request = compute.UpdateRouterRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if router is not None:
            request.router = router
        if router_resource is not None:
            request.router_resource = router_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("router", request.router),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + if router_resource is not None: + request.router_resource = router_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateRouterRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + router: Optional[str] = None, + router_resource: Optional[compute.Router] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified Router resource with the data + included in the request. This method conforms to PUT + semantics, which requests that the state of the target + resource be created or replaced with the state defined + by the representation enclosed in the request message + payload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateRouterRequest, dict]): + The request object. A request message for Routers.Update. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router (str): + Name of the Router resource to + update. + + This corresponds to the ``router`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + This corresponds to the ``router_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, router, router_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateRouterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateRouterRequest): + request = compute.UpdateRouterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if router is not None: + request.router = router + if router_resource is not None: + request.router_resource = router_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("router", request.router), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RoutersClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RoutersClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/pagers.py new file mode 100644 index 000000000..dc7361d99 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/pagers.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RouterAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RouterAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.RouterAggregatedList], + request: compute.AggregatedListRoutersRequest, + response: compute.RouterAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListRoutersRequest): + The initial request object. + response (google.cloud.compute_v1.types.RouterAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListRoutersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RouterAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.RoutersScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.RoutersScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class GetNatMappingInfoPager: + """A pager for iterating through ``get_nat_mapping_info`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.VmEndpointNatMappingsList` object, and + provides an ``__iter__`` method to iterate through its + ``result`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``GetNatMappingInfo`` requests and continue to iterate + through the ``result`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.VmEndpointNatMappingsList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.VmEndpointNatMappingsList], + request: compute.GetNatMappingInfoRoutersRequest, + response: compute.VmEndpointNatMappingsList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.GetNatMappingInfoRoutersRequest): + The initial request object. + response (google.cloud.compute_v1.types.VmEndpointNatMappingsList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.GetNatMappingInfoRoutersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.VmEndpointNatMappingsList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.VmEndpointNatMappings]: + for page in self.pages: + yield from page.result + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RouterList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RouterList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.RouterList], + request: compute.ListRoutersRequest, + response: compute.RouterList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRoutersRequest): + The initial request object. + response (google.cloud.compute_v1.types.RouterList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListRoutersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RouterList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Router]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/__init__.py new file mode 100644 index 000000000..9f74572a0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RoutersTransport +from .rest import RoutersRestTransport +from .rest import RoutersRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[RoutersTransport]] +_transport_registry['rest'] = RoutersRestTransport + +__all__ = ( + 'RoutersTransport', + 'RoutersRestTransport', + 'RoutersRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/base.py new file mode 100644 index 000000000..c8a8c4d0e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/base.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RoutersTransport(abc.ABC): + """Abstract transport class for Routers.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_nat_mapping_info: gapic_v1.method.wrap_method( + self.get_nat_mapping_info, + default_timeout=None, + client_info=client_info, + ), + self.get_router_status: gapic_v1.method.wrap_method( + self.get_router_status, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.preview: gapic_v1.method.wrap_method( + self.preview, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListRoutersRequest], + Union[ + compute.RouterAggregatedList, + Awaitable[compute.RouterAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRouterRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRouterRequest], + Union[ + compute.Router, + Awaitable[compute.Router] + ]]: + raise NotImplementedError() + + @property + def get_nat_mapping_info(self) -> Callable[ + [compute.GetNatMappingInfoRoutersRequest], + Union[ + compute.VmEndpointNatMappingsList, + Awaitable[compute.VmEndpointNatMappingsList] + ]]: + raise NotImplementedError() + + @property + def get_router_status(self) -> Callable[ + [compute.GetRouterStatusRouterRequest], + Union[ + compute.RouterStatusResponse, + Awaitable[compute.RouterStatusResponse] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRouterRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRoutersRequest], + Union[ + compute.RouterList, + Awaitable[compute.RouterList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchRouterRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def preview(self) -> Callable[ + [compute.PreviewRouterRequest], + Union[ + compute.RoutersPreviewResponse, + Awaitable[compute.RoutersPreviewResponse] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateRouterRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RoutersTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/rest.py new file mode 100644 index 000000000..5b9ffedb9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routers/transports/rest.py @@ -0,0 +1,1380 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RoutersTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RoutersRestInterceptor: + """Interceptor for Routers. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RoutersRestTransport. + + .. 
code-block:: python + class MyCustomRoutersInterceptor(RoutersRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_nat_mapping_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_nat_mapping_info(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_router_status(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_router_status(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_preview(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_preview(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RoutersRestTransport(interceptor=MyCustomRoutersInterceptor()) + client = RoutersClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListRoutersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListRoutersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.RouterAggregatedList) -> compute.RouterAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteRouterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRouterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetRouterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRouterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_get(self, response: compute.Router) -> compute.Router: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_get_nat_mapping_info(self, request: compute.GetNatMappingInfoRoutersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetNatMappingInfoRoutersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_nat_mapping_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_get_nat_mapping_info(self, response: compute.VmEndpointNatMappingsList) -> compute.VmEndpointNatMappingsList: + """Post-rpc interceptor for get_nat_mapping_info + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_get_router_status(self, request: compute.GetRouterStatusRouterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRouterStatusRouterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_router_status + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. 
+ """ + return request, metadata + + def post_get_router_status(self, response: compute.RouterStatusResponse) -> compute.RouterStatusResponse: + """Post-rpc interceptor for get_router_status + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRouterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRouterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRoutersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRoutersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_list(self, response: compute.RouterList) -> compute.RouterList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchRouterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRouterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. 
+ """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_preview(self, request: compute.PreviewRouterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PreviewRouterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for preview + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_preview(self, response: compute.RoutersPreviewResponse) -> compute.RoutersPreviewResponse: + """Post-rpc interceptor for preview + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateRouterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateRouterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routers server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the Routers server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RoutersRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RoutersRestInterceptor + + +class RoutersRestTransport(RoutersTransport): + """REST backend transport for Routers. + + The Routers API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RoutersRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RoutersRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(RoutersRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListRoutersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + 
metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RouterAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListRoutersRequest): + The request object. A request message for + Routers.AggregatedList. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.RouterAggregatedList: + Contains a list of routers. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/routers', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListRoutersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RouterAggregatedList() + pb_resp = compute.RouterAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(RoutersRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteRouterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRouterRequest): + The request object. A request message for Routers.Delete. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers/{router}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRouterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RoutersRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRouterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Router: + r"""Call the get method over HTTP. 
+ + Args: + request (~.compute.GetRouterRequest): + The request object. A request message for Routers.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Router: + Represents a Cloud Router resource. + For more information about Cloud Router, + read the Cloud Router overview. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers/{router}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRouterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Router() + pb_resp = compute.Router.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetNatMappingInfo(RoutersRestStub): + def __hash__(self): + return hash("GetNatMappingInfo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetNatMappingInfoRoutersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.VmEndpointNatMappingsList: + r"""Call the get nat mapping info method over HTTP. + + Args: + request (~.compute.GetNatMappingInfoRoutersRequest): + The request object. A request message for + Routers.GetNatMappingInfo. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.VmEndpointNatMappingsList: + Contains a list of + VmEndpointNatMappings. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers/{router}/getNatMappingInfo', + }, + ] + request, metadata = self._interceptor.pre_get_nat_mapping_info(request, metadata) + pb_request = compute.GetNatMappingInfoRoutersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VmEndpointNatMappingsList() + pb_resp = compute.VmEndpointNatMappingsList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_nat_mapping_info(resp) + return resp + + class _GetRouterStatus(RoutersRestStub): + def __hash__(self): + return hash("GetRouterStatus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRouterStatusRouterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RouterStatusResponse: + r"""Call the get router status method over HTTP. + + Args: + request (~.compute.GetRouterStatusRouterRequest): + The request object. A request message for + Routers.GetRouterStatus. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.RouterStatusResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers/{router}/getRouterStatus', + }, + ] + request, metadata = self._interceptor.pre_get_router_status(request, metadata) + pb_request = compute.GetRouterStatusRouterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RouterStatusResponse() + pb_resp = compute.RouterStatusResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_router_status(resp) + return resp + + class _Insert(RoutersRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRouterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRouterRequest): + The request object. A request message for Routers.Insert. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers', + 'body': 'router_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRouterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RoutersRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRoutersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RouterList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRoutersRequest): + The request object. A request message for Routers.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.RouterList: + Contains a list of Router resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRoutersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RouterList() + pb_resp = compute.RouterList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(RoutersRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchRouterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. 
+ + Args: + request (~.compute.PatchRouterRequest): + The request object. A request message for Routers.Patch. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers/{router}', + 'body': 'router_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchRouterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _Preview(RoutersRestStub): + def __hash__(self): + return hash("Preview") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PreviewRouterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RoutersPreviewResponse: + r"""Call the preview method over HTTP. + + Args: + request (~.compute.PreviewRouterRequest): + The request object. A request message for + Routers.Preview. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.RoutersPreviewResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers/{router}/preview', + 'body': 'router_resource', + }, + ] + request, metadata = self._interceptor.pre_preview(request, metadata) + pb_request = compute.PreviewRouterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RoutersPreviewResponse() + pb_resp = compute.RoutersPreviewResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_preview(resp) + return resp + + class _Update(RoutersRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateRouterRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateRouterRequest): + The request object. A request message for Routers.Update. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/regions/{region}/routers/{router}', + 'body': 'router_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateRouterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListRoutersRequest], + compute.RouterAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteRouterRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRouterRequest], + compute.Router]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_nat_mapping_info(self) -> Callable[ + [compute.GetNatMappingInfoRoutersRequest], + compute.VmEndpointNatMappingsList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetNatMappingInfo(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_router_status(self) -> Callable[ + [compute.GetRouterStatusRouterRequest], + compute.RouterStatusResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRouterStatus(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRouterRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRoutersRequest], + compute.RouterList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchRouterRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def preview(self) -> Callable[ + [compute.PreviewRouterRequest], + compute.RoutersPreviewResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Preview(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateRouterRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RoutersRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/__init__.py new file mode 100644 index 000000000..b6db823e9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RoutesClient + +__all__ = ( + 'RoutesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/client.py new file mode 100644 index 000000000..2615e4782 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/client.py @@ -0,0 +1,1101 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.routes import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import RoutesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import RoutesRestTransport + + +class RoutesClientMeta(type): + """Metaclass for the Routes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[RoutesTransport]] + _transport_registry["rest"] = RoutesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[RoutesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RoutesClient(metaclass=RoutesClientMeta): + """The Routes API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RoutesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RoutesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RoutesTransport: + """Returns the transport used by the client instance. + + Returns: + RoutesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, RoutesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the routes client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, RoutesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, RoutesTransport): + # transport is a RoutesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteRouteRequest, dict]] = None, + *, + project: Optional[str] = None, + route: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified Route resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRouteRequest( + project="project_value", + route="route_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRouteRequest, dict]): + The request object. 
A request message for Routes.Delete. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + route (str): + Name of the Route resource to delete. + This corresponds to the ``route`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, route]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRouteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRouteRequest): + request = compute.DeleteRouteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if route is not None: + request.route = route + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("route", request.route), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteRouteRequest, dict]] = None, + *, + project: Optional[str] = None, + route: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified Route resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRouteRequest( + project="project_value", + route="route_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRouteRequest, dict]): + The request object. A request message for Routes.Delete. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + route (str): + Name of the Route resource to delete. 
+ This corresponds to the ``route`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, route]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteRouteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteRouteRequest): + request = compute.DeleteRouteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if route is not None: + request.route = route + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("route", request.route), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetRouteRequest, dict]] = None, + *, + project: Optional[str] = None, + route: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Route: + r"""Returns the specified Route resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.GetRouteRequest( + project="project_value", + route="route_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRouteRequest, dict]): + The request object. A request message for Routes.Get. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + route (str): + Name of the Route resource to return. + This corresponds to the ``route`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Route: + Represents a Route resource. A route + defines a path from VM instances in the + VPC network to a specific destination. + This destination can be inside or + outside the VPC network. For more + information, read the Routes overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, route]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRouteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRouteRequest): + request = compute.GetRouteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if route is not None: + request.route = route + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("route", request.route), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertRouteRequest, dict]] = None, + *, + project: Optional[str] = None, + route_resource: Optional[compute.Route] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a Route resource in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRouteRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRouteRequest, dict]): + The request object. A request message for Routes.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + route_resource (google.cloud.compute_v1.types.Route): + The body resource for this request + This corresponds to the ``route_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, route_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRouteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRouteRequest): + request = compute.InsertRouteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if route_resource is not None: + request.route_resource = route_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertRouteRequest, dict]] = None, + *, + project: Optional[str] = None, + route_resource: Optional[compute.Route] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a Route resource in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRouteRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRouteRequest, dict]): + The request object. A request message for Routes.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + route_resource (google.cloud.compute_v1.types.Route): + The body resource for this request + This corresponds to the ``route_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, route_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertRouteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertRouteRequest): + request = compute.InsertRouteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if route_resource is not None: + request.route_resource = route_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListRoutesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of Route resources available to + the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.ListRoutesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRoutesRequest, dict]): + The request object. A request message for Routes.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.routes.pagers.ListPager: + Contains a list of Route resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListRoutesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListRoutesRequest): + request = compute.ListRoutesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RoutesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "RoutesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/pagers.py new file mode 100644 index 000000000..1f35a9d4e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.RouteList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.RouteList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.RouteList], + request: compute.ListRoutesRequest, + response: compute.RouteList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRoutesRequest): + The initial request object. + response (google.cloud.compute_v1.types.RouteList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListRoutesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.RouteList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Route]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/__init__.py new file mode 100644 index 000000000..0d5d1ce03 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RoutesTransport +from .rest import RoutesRestTransport +from .rest import RoutesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RoutesTransport]] +_transport_registry['rest'] = RoutesRestTransport + +__all__ = ( + 'RoutesTransport', + 'RoutesRestTransport', + 'RoutesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/base.py new file mode 100644 index 000000000..809989f36 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/base.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class RoutesTransport(abc.ABC): + """Abstract transport class for Routes.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteRouteRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetRouteRequest], + Union[ + compute.Route, + Awaitable[compute.Route] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertRouteRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListRoutesRequest], + Union[ + compute.RouteList, + Awaitable[compute.RouteList] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + 
self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'RoutesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/rest.py new file mode 100644 index 000000000..d8a58b502 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/routes/transports/rest.py @@ -0,0 +1,666 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import RoutesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class RoutesRestInterceptor: + """Interceptor for Routes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RoutesRestTransport. + + .. 
code-block:: python + class MyCustomRoutesInterceptor(RoutesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RoutesRestTransport(interceptor=MyCustomRoutesInterceptor()) + client = RoutesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteRouteRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routes server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Routes server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetRouteRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routes server. 
+ """ + return request, metadata + + def post_get(self, response: compute.Route) -> compute.Route: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Routes server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertRouteRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routes server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Routes server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListRoutesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListRoutesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Routes server. + """ + return request, metadata + + def post_list(self, response: compute.RouteList) -> compute.RouteList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Routes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RoutesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RoutesRestInterceptor + + +class RoutesRestTransport(RoutesTransport): + """REST backend transport for Routes. + + The Routes API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[RoutesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST)
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or RoutesRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _Delete(RoutesRestStub):
+ def __hash__(self):
+ return hash("Delete")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+ }
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+ def __call__(self,
+ request: compute.DeleteRouteRequest, *,
+ retry: OptionalRetry=gapic_v1.method.DEFAULT,
+ timeout: Optional[float]=None,
+ metadata: Sequence[Tuple[str,
str]]=(),
+ ) -> compute.Operation:
+ r"""Call the delete method over HTTP.
+
+ Args:
+ request (~.compute.DeleteRouteRequest):
+ The request object. A request message for Routes.Delete.
+ See the method description for details.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.compute.Operation:
+ Represents an Operation resource. Google Compute Engine
+ has three Operation resources: \*
+ `Global </compute/docs/reference/rest/v1/globalOperations>`__
+ \*
+ `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+ \*
+ `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+ You can use an operation resource to manage asynchronous
+ API requests. For more information, read Handling API
+ responses. Operations can be global, regional or zonal.
+ - For global operations, use the ``globalOperations``
+ resource. - For regional operations, use the
+ ``regionOperations`` resource. - For zonal operations,
+ use the ``zonalOperations`` resource. For more
+ information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/routes/{route}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(RoutesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRouteRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Route: + r"""Call the get method over HTTP. 
+ + Args: + request (~.compute.GetRouteRequest): + The request object. A request message for Routes.Get. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Route: + Represents a Route resource. A route + defines a path from VM instances in the + VPC network to a specific destination. + This destination can be inside or + outside the VPC network. For more + information, read the Routes overview. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/routes/{route}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Route() + pb_resp = compute.Route.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(RoutesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertRouteRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRouteRequest): + The request object. A request message for Routes.Insert. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/routes', + 'body': 'route_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(RoutesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListRoutesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.RouteList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRoutesRequest): + The request object. A request message for Routes.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.RouteList: + Contains a list of Route resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/routes', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListRoutesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RouteList() + pb_resp = compute.RouteList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteRouteRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetRouteRequest], + compute.Route]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertRouteRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListRoutesRequest], + compute.RouteList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'RoutesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/__init__.py new file mode 100644 index 000000000..4c90a3f43 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import SecurityPoliciesClient + +__all__ = ( + 'SecurityPoliciesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/client.py new file mode 100644 index 000000000..638e9858d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/client.py @@ -0,0 +1,2757 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.security_policies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import SecurityPoliciesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import SecurityPoliciesRestTransport + + +class SecurityPoliciesClientMeta(type): + """Metaclass for the SecurityPolicies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[SecurityPoliciesTransport]] + _transport_registry["rest"] = SecurityPoliciesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[SecurityPoliciesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SecurityPoliciesClient(metaclass=SecurityPoliciesClientMeta): + """The SecurityPolicies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SecurityPoliciesClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SecurityPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SecurityPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + SecurityPoliciesTransport: The transport used by the client + instance. 
+ + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SecurityPoliciesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the security policies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SecurityPoliciesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SecurityPoliciesTransport): + # transport is a SecurityPoliciesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def add_rule_unary(self, + request: Optional[Union[compute.AddRuleSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_rule_resource: Optional[compute.SecurityPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Inserts a rule into a security policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddRuleSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.AddRule. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + This corresponds to the ``security_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, security_policy, security_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddRuleSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddRuleSecurityPolicyRequest): + request = compute.AddRuleSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + if security_policy_rule_resource is not None: + request.security_policy_rule_resource = security_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def add_rule(self, + request: Optional[Union[compute.AddRuleSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_rule_resource: Optional[compute.SecurityPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Inserts a rule into a security policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddRuleSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.AddRule. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + This corresponds to the ``security_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, security_policy, security_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddRuleSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddRuleSecurityPolicyRequest): + request = compute.AddRuleSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + if security_policy_rule_resource is not None: + request.security_policy_rule_resource = security_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.add_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListSecurityPoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all SecurityPolicy resources, + regional and global, available to the specified project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListSecurityPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListSecurityPoliciesRequest, dict]): + The request object. A request message for + SecurityPolicies.AggregatedList. See the + method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.security_policies.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListSecurityPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListSecurityPoliciesRequest): + request = compute.AggregatedListSecurityPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + delete. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSecurityPolicyRequest): + request = compute.DeleteSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + delete. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSecurityPolicyRequest): + request = compute.DeleteSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SecurityPolicy: + r"""List all of the ordered rules present in a single + specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.Get. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to get. + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SecurityPolicy: + Represents a Google Cloud Armor + security policy resource. Only external + backend services that use load balancers + can reference a security policy. For + more information, see Google Cloud Armor + security policy overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetSecurityPolicyRequest): + request = compute.GetSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_rule(self, + request: Optional[Union[compute.GetRuleSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SecurityPolicyRule: + r"""Gets a rule at the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRuleSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.GetRule. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to which + the queried rule belongs. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SecurityPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetRuleSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetRuleSecurityPolicyRequest): + request = compute.GetRuleSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSecurityPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.Insert. See the method + description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, security_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertSecurityPolicyRequest): + request = compute.InsertSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy_resource: Optional[compute.SecurityPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new policy in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSecurityPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, security_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertSecurityPolicyRequest): + request = compute.InsertSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListSecurityPoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""List all the policies that have been configured for + the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListSecurityPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListSecurityPoliciesRequest, dict]): + The request object. A request message for + SecurityPolicies.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.security_policies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListSecurityPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListSecurityPoliciesRequest): + request = compute.ListSecurityPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_preconfigured_expression_sets(self, + request: Optional[Union[compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse: + r"""Gets the current list of preconfigured Web + Application Firewall (WAF) expressions. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_preconfigured_expression_sets(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListPreconfiguredExpressionSetsSecurityPoliciesRequest( + project="project_value", + ) + + # Make the request + response = client.list_preconfigured_expression_sets(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, dict]): + The request object. A request message for + SecurityPolicies.ListPreconfiguredExpressionSets. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SecurityPoliciesListPreconfiguredExpressionSetsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest):
+ request = compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_preconfigured_expression_sets]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ )),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def patch_unary(self,
+ request: Optional[Union[compute.PatchSecurityPolicyRequest, dict]] = None,
+ *,
+ project: Optional[str] = None,
+ security_policy: Optional[str] = None,
+ security_policy_resource: Optional[compute.SecurityPolicy] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> compute.Operation:
+ r"""Patches the specified policy with the data included
+ in the request. To clear fields in the policy, leave the
+ fields empty and specify them in the updateMask. This
+ cannot be used to update the rules in the policy. 
+ Please use the per rule methods like addRule, patchRule, + and removeRule instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + This corresponds to the ``security_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, security_policy, security_policy_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.PatchSecurityPolicyRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.PatchSecurityPolicyRequest):
+ request = compute.PatchSecurityPolicyRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if security_policy is not None:
+ request.security_policy = security_policy
+ if security_policy_resource is not None:
+ request.security_policy_resource = security_policy_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.patch]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("security_policy", request.security_policy),
+ )),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response. 
+ return response
+
+ def patch(self,
+ request: Optional[Union[compute.PatchSecurityPolicyRequest, dict]] = None,
+ *,
+ project: Optional[str] = None,
+ security_policy: Optional[str] = None,
+ security_policy_resource: Optional[compute.SecurityPolicy] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> extended_operation.ExtendedOperation:
+ r"""Patches the specified policy with the data included
+ in the request. To clear fields in the policy, leave the
+ fields empty and specify them in the updateMask. This
+ cannot be used to update the rules in the policy.
+ Please use the per rule methods like addRule, patchRule,
+ and removeRule instead.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import compute_v1
+
+ def sample_patch():
+ # Create a client
+ client = compute_v1.SecurityPoliciesClient()
+
+ # Initialize request argument(s)
+ request = compute_v1.PatchSecurityPolicyRequest(
+ project="project_value",
+ security_policy="security_policy_value",
+ )
+
+ # Make the request
+ response = client.patch(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.compute_v1.types.PatchSecurityPolicyRequest, dict]):
+ The request object. A request message for
+ SecurityPolicies.Patch. See the method
+ description for details.
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set. 
+ security_policy (str):
+ Name of the security policy to
+ update.
+
+ This corresponds to the ``security_policy`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy):
+ The body resource for this request
+ This corresponds to the ``security_policy_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, security_policy, security_policy_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.PatchSecurityPolicyRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.PatchSecurityPolicyRequest):
+ request = compute.PatchSecurityPolicyRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these. 
+ if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + if security_policy_resource is not None: + request.security_policy_resource = security_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def patch_rule_unary(self, + request: Optional[Union[compute.PatchRuleSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_rule_resource: Optional[compute.SecurityPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches a rule at the specified priority. To clear + fields in the rule, leave the fields empty and specify + them in the updateMask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRuleSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.PatchRule. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. 
+ + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + This corresponds to the ``security_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, security_policy, security_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRuleSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRuleSecurityPolicyRequest): + request = compute.PatchRuleSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + if security_policy_rule_resource is not None: + request.security_policy_rule_resource = security_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_rule(self, + request: Optional[Union[compute.PatchRuleSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + security_policy_rule_resource: Optional[compute.SecurityPolicyRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches a rule at the specified priority. To clear + fields in the rule, leave the fields empty and specify + them in the updateMask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRuleSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.PatchRule. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + This corresponds to the ``security_policy_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, security_policy, security_policy_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchRuleSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchRuleSecurityPolicyRequest): + request = compute.PatchRuleSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + if security_policy_rule_resource is not None: + request.security_policy_rule_resource = security_policy_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def remove_rule_unary(self, + request: Optional[Union[compute.RemoveRuleSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes a rule at the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveRuleSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.RemoveRule. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveRuleSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveRuleSecurityPolicyRequest): + request = compute.RemoveRuleSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_rule(self, + request: Optional[Union[compute.RemoveRuleSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + security_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes a rule at the specified priority. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveRuleSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.RemoveRule. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_policy (str): + Name of the security policy to + update. + + This corresponds to the ``security_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, security_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveRuleSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveRuleSecurityPolicyRequest): + request = compute.RemoveRuleSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if security_policy is not None: + request.security_policy = security_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_rule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("security_policy", request.security_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on a security policy. To learn more + about labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsSecurityPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.SetLabels. See the + method description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsSecurityPolicyRequest): + request = compute.SetLabelsSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsSecurityPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on a security policy. To learn more + about labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsSecurityPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsSecurityPolicyRequest, dict]): + The request object. A request message for + SecurityPolicies.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsSecurityPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsSecurityPolicyRequest): + request = compute.SetLabelsSecurityPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "SecurityPoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "SecurityPoliciesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/pagers.py new file mode 100644 index 000000000..c3a90920c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SecurityPoliciesAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SecurityPoliciesAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.SecurityPoliciesAggregatedList], + request: compute.AggregatedListSecurityPoliciesRequest, + response: compute.SecurityPoliciesAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListSecurityPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SecurityPoliciesAggregatedList): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListSecurityPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SecurityPoliciesAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.SecurityPoliciesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.SecurityPoliciesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SecurityPolicyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SecurityPolicyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.SecurityPolicyList], + request: compute.ListSecurityPoliciesRequest, + response: compute.SecurityPolicyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListSecurityPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SecurityPolicyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListSecurityPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SecurityPolicyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.SecurityPolicy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/__init__.py new file mode 100644 index 000000000..0ff1a9a42 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SecurityPoliciesTransport +from .rest import SecurityPoliciesRestTransport +from .rest import SecurityPoliciesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SecurityPoliciesTransport]] +_transport_registry['rest'] = SecurityPoliciesRestTransport + +__all__ = ( + 'SecurityPoliciesTransport', + 'SecurityPoliciesRestTransport', + 'SecurityPoliciesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/base.py new file mode 100644 index 000000000..9e9334093 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/base.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class SecurityPoliciesTransport(abc.ABC): + """Abstract transport class for SecurityPolicies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_rule: gapic_v1.method.wrap_method( + self.add_rule, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_rule: gapic_v1.method.wrap_method( + self.get_rule, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_preconfigured_expression_sets: gapic_v1.method.wrap_method( + self.list_preconfigured_expression_sets, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.patch_rule: gapic_v1.method.wrap_method( + self.patch_rule, + default_timeout=None, + client_info=client_info, + ), + self.remove_rule: gapic_v1.method.wrap_method( + self.remove_rule, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def add_rule(self) -> Callable[ + [compute.AddRuleSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListSecurityPoliciesRequest], + Union[ + compute.SecurityPoliciesAggregatedList, + Awaitable[compute.SecurityPoliciesAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetSecurityPolicyRequest], + Union[ + compute.SecurityPolicy, + Awaitable[compute.SecurityPolicy] + ]]: + raise NotImplementedError() + + @property + def get_rule(self) -> Callable[ + [compute.GetRuleSecurityPolicyRequest], + Union[ + compute.SecurityPolicyRule, + Awaitable[compute.SecurityPolicyRule] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListSecurityPoliciesRequest], + Union[ + compute.SecurityPolicyList, + Awaitable[compute.SecurityPolicyList] + ]]: + raise NotImplementedError() + + @property + def list_preconfigured_expression_sets(self) -> Callable[ + [compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest], + Union[ + compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse, + Awaitable[compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def patch_rule(self) -> Callable[ + 
[compute.PatchRuleSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def remove_rule(self) -> Callable[ + [compute.RemoveRuleSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsSecurityPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'SecurityPoliciesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/rest.py new file mode 100644 index 000000000..2a5593e58 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/security_policies/transports/rest.py @@ -0,0 +1,1666 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import SecurityPoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SecurityPoliciesRestInterceptor: + """Interceptor for SecurityPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SecurityPoliciesRestTransport. + + .. 
code-block:: python + class MyCustomSecurityPoliciesInterceptor(SecurityPoliciesRestInterceptor): + def pre_add_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_preconfigured_expression_sets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_preconfigured_expression_sets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_rule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_rule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SecurityPoliciesRestTransport(interceptor=MyCustomSecurityPoliciesInterceptor()) + client = SecurityPoliciesClient(transport=transport) + + + """ + def pre_add_rule(self, request: compute.AddRuleSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddRuleSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_add_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_rule + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_aggregated_list(self, request: compute.AggregatedListSecurityPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListSecurityPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.SecurityPoliciesAggregatedList) -> compute.SecurityPoliciesAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. 
+ """ + return request, metadata + + def post_get(self, response: compute.SecurityPolicy) -> compute.SecurityPolicy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_get_rule(self, request: compute.GetRuleSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetRuleSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_get_rule(self, response: compute.SecurityPolicyRule) -> compute.SecurityPolicyRule: + """Post-rpc interceptor for get_rule + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListSecurityPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListSecurityPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. 
+ """ + return request, metadata + + def post_list(self, response: compute.SecurityPolicyList) -> compute.SecurityPolicyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_list_preconfigured_expression_sets(self, request: compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_preconfigured_expression_sets + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_list_preconfigured_expression_sets(self, response: compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse) -> compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse: + """Post-rpc interceptor for list_preconfigured_expression_sets + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_patch_rule(self, request: compute.PatchRuleSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchRuleSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_patch_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch_rule + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_remove_rule(self, request: compute.RemoveRuleSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveRuleSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_rule + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. + """ + return request, metadata + + def post_remove_rule(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_rule + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsSecurityPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsSecurityPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecurityPolicies server. 
+ """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the SecurityPolicies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SecurityPoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SecurityPoliciesRestInterceptor + + +class SecurityPoliciesRestTransport(SecurityPoliciesTransport): + """REST backend transport for SecurityPolicies. + + The SecurityPolicies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[SecurityPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SecurityPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AddRule(SecurityPoliciesRestStub): + def __hash__(self): + return hash("AddRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddRuleSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add rule method over HTTP. + + Args: + request (~.compute.AddRuleSecurityPolicyRequest): + The request object. A request message for + SecurityPolicies.AddRule. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/addRule', + 'body': 'security_policy_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_add_rule(request, metadata) + pb_request = compute.AddRuleSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_rule(resp) + return resp + + class _AggregatedList(SecurityPoliciesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListSecurityPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SecurityPoliciesAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListSecurityPoliciesRequest): + The request object. A request message for + SecurityPolicies.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SecurityPoliciesAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/securityPolicies', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListSecurityPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SecurityPoliciesAggregatedList() + pb_resp = compute.SecurityPoliciesAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(SecurityPoliciesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteSecurityPolicyRequest): + The request object. A request message for + SecurityPolicies.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/securityPolicies/{security_policy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(SecurityPoliciesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SecurityPolicy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetSecurityPolicyRequest): + The request object. A request message for + SecurityPolicies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SecurityPolicy: + Represents a Google Cloud Armor + security policy resource. Only external + backend services that use load balancers + can reference a security policy. For + more information, see Google Cloud Armor + security policy overview. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/securityPolicies/{security_policy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SecurityPolicy() + pb_resp = compute.SecurityPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetRule(SecurityPoliciesRestStub): + def __hash__(self): + return hash("GetRule") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetRuleSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SecurityPolicyRule: + r"""Call the get rule method over HTTP. + + Args: + request (~.compute.GetRuleSecurityPolicyRequest): + The request object. A request message for + SecurityPolicies.GetRule. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SecurityPolicyRule: + Represents a rule that describes one + or more match conditions along with the + action to be taken when traffic matches + this condition (allow or deny). 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/getRule', + }, + ] + request, metadata = self._interceptor.pre_get_rule(request, metadata) + pb_request = compute.GetRuleSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SecurityPolicyRule() + pb_resp = compute.SecurityPolicyRule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_rule(resp) + return resp + + class _Insert(SecurityPoliciesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertSecurityPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertSecurityPolicyRequest): + The request object. A request message for + SecurityPolicies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/securityPolicies', + 'body': 'security_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertSecurityPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(SecurityPoliciesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListSecurityPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SecurityPolicyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListSecurityPoliciesRequest): + The request object. A request message for + SecurityPolicies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SecurityPolicyList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/securityPolicies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListSecurityPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SecurityPolicyList() + pb_resp = compute.SecurityPolicyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListPreconfiguredExpressionSets(SecurityPoliciesRestStub): + def __hash__(self): + return hash("ListPreconfiguredExpressionSets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse: + r"""Call the list preconfigured + expression sets method over HTTP. + + Args: + request (~.compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest): + The request object. A request message for + SecurityPolicies.ListPreconfiguredExpressionSets. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/securityPolicies/listPreconfiguredExpressionSets', + }, + ] + request, metadata = self._interceptor.pre_list_preconfigured_expression_sets(request, metadata) + pb_request = compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse()
            pb_resp = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list_preconfigured_expression_sets(resp)
            return resp

    class _Patch(SecurityPoliciesRestStub):
        """REST stub for SecurityPolicies.Patch."""

        def __hash__(self):
            return hash("Patch")

        # Required request fields with proto3 defaults (none for this method).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.PatchSecurityPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the patch method over HTTP.

            Args:
                request (~.compute.PatchSecurityPolicyRequest):
                    The request object. A request message for
                    SecurityPolicies.Patch. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'patch',
                'uri': '/compute/v1/projects/{project}/global/securityPolicies/{security_policy}',
                'body': 'security_policy_resource',
            },
            ]
            request, metadata = self._interceptor.pre_patch(request, metadata)
            pb_request = compute.PatchSecurityPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_patch(resp)
            return resp

    class _PatchRule(SecurityPoliciesRestStub):
        """REST stub for SecurityPolicies.PatchRule."""

        def __hash__(self):
            return hash("PatchRule")

        # Required request fields with proto3 defaults (none for this method).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.PatchRuleSecurityPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the patch rule method over HTTP.

            Args:
                request (~.compute.PatchRuleSecurityPolicyRequest):
                    The request object. A request message for
                    SecurityPolicies.PatchRule. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/patchRule',
                'body': 'security_policy_rule_resource',
            },
            ]
            request, metadata = self._interceptor.pre_patch_rule(request, metadata)
            pb_request = compute.PatchRuleSecurityPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_patch_rule(resp)
            return resp

    class _RemoveRule(SecurityPoliciesRestStub):
        """REST stub for SecurityPolicies.RemoveRule."""

        def __hash__(self):
            return hash("RemoveRule")

        # Required request fields with proto3 defaults (none for this method).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.RemoveRuleSecurityPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the remove rule method over HTTP.

            Args:
                request (~.compute.RemoveRuleSecurityPolicyRequest):
                    The request object. A request message for
                    SecurityPolicies.RemoveRule. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/removeRule',
            },
            ]
            request, metadata = self._interceptor.pre_remove_rule(request, metadata)
            pb_request = compute.RemoveRuleSecurityPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_remove_rule(resp)
            return resp

    class _SetLabels(SecurityPoliciesRestStub):
        """REST stub for SecurityPolicies.SetLabels."""

        def __hash__(self):
            return hash("SetLabels")

        # Required request fields with proto3 defaults (none for this method).
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.SetLabelsSecurityPolicyRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the set labels method over HTTP.

            Args:
                request (~.compute.SetLabelsSecurityPolicyRequest):
                    The request object. A request message for
                    SecurityPolicies.SetLabels. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/compute/v1/projects/{project}/global/securityPolicies/{resource}/setLabels',
                'body': 'global_set_labels_request_resource',
            },
            ]
            request, metadata = self._interceptor.pre_set_labels(request, metadata)
            pb_request = compute.SetLabelsSecurityPolicyRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                including_default_value_fields=False,
                use_integers_for_enums=False
            )
            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_set_labels(resp)
            return resp

    @property
    def add_rule(self) -> Callable[
            [compute.AddRuleSecurityPolicyRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._AddRule(self._session, self._host, self._interceptor)  # type: ignore

    # Each property below returns the corresponding per-method REST stub,
    # typed as a plain callable so callers can invoke it like any other
    # transport method.

    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListSecurityPoliciesRequest],
            compute.SecurityPoliciesAggregatedList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._AggregatedList(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def delete(self) -> Callable[
            [compute.DeleteSecurityPolicyRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Delete(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def get(self) -> Callable[
            [compute.GetSecurityPolicyRequest],
            compute.SecurityPolicy]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def get_rule(self) -> Callable[
            [compute.GetRuleSecurityPolicyRequest],
            compute.SecurityPolicyRule]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._GetRule(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def insert(self) -> Callable[
            [compute.InsertSecurityPolicyRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Insert(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListSecurityPoliciesRequest],
            compute.SecurityPolicyList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def list_preconfigured_expression_sets(self) -> Callable[
            [compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest],
            compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._ListPreconfiguredExpressionSets(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def patch(self) -> Callable[
            [compute.PatchSecurityPolicyRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Patch(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def patch_rule(self) -> Callable[
            [compute.PatchRuleSecurityPolicyRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._PatchRule(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def remove_rule(self) -> Callable[
            [compute.RemoveRuleSecurityPolicyRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+ # In C++ this would require a dynamic_cast + return self._RemoveRule(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsSecurityPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'SecurityPoliciesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/__init__.py new file mode 100644 index 000000000..7cba3ec30 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import ServiceAttachmentsClient + +__all__ = ( + 'ServiceAttachmentsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/client.py new file mode 100644 index 000000000..d422026e9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/client.py @@ -0,0 +1,2061 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
from collections import OrderedDict
import functools
import os
import re
from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast

from google.cloud.compute_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

# Older versions of google-api-core lack ``_MethodDefault``; fall back to a
# plain ``object`` sentinel so the annotation stays importable.
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.api_core import extended_operation  # type: ignore
from google.cloud.compute_v1.services.service_attachments import pagers
from google.cloud.compute_v1.types import compute
from .transports.base import ServiceAttachmentsTransport, DEFAULT_CLIENT_INFO
from .transports.rest import ServiceAttachmentsRestTransport


class ServiceAttachmentsClientMeta(type):
    """Metaclass for the ServiceAttachments client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    # Registry of available transports, keyed by transport label.
    _transport_registry = OrderedDict()  # type: Dict[str, Type[ServiceAttachmentsTransport]]
    _transport_registry["rest"] = ServiceAttachmentsRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[ServiceAttachmentsTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport.
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ServiceAttachmentsClient(metaclass=ServiceAttachmentsClientMeta): + """The ServiceAttachments API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceAttachmentsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ServiceAttachmentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ServiceAttachmentsTransport: + """Returns the transport used by the client instance. + + Returns: + ServiceAttachmentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ServiceAttachmentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the service attachments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ServiceAttachmentsTransport]): The + transport to use. 
 If set to None, a transport is chosen
                automatically.
                NOTE: "rest" transport functionality is currently in a
                beta state (preview). We welcome your feedback via an
                issue in this library's source repository.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        # An API key and explicit credentials are mutually exclusive ways of
        # authenticating; reject the ambiguous combination up front.
        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, ServiceAttachmentsTransport):
            # transport is a ServiceAttachmentsTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListServiceAttachmentsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all ServiceAttachment + resources, regional and global, available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListServiceAttachmentsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListServiceAttachmentsRequest, dict]): + The request object. A request message for + ServiceAttachments.AggregatedList. See + the method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.service_attachments.pagers.AggregatedListPager: + Contains a list of + ServiceAttachmentsScopedList. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListServiceAttachmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListServiceAttachmentsRequest): + request = compute.AggregatedListServiceAttachmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + service_attachment: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified ServiceAttachment in the given + scope + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteServiceAttachmentRequest( + project="project_value", + region="region_value", + service_attachment="service_attachment_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_attachment (str): + Name of the ServiceAttachment + resource to delete. + + This corresponds to the ``service_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, service_attachment]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteServiceAttachmentRequest): + request = compute.DeleteServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if service_attachment is not None: + request.service_attachment = service_attachment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("service_attachment", request.service_attachment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete(self, + request: Optional[Union[compute.DeleteServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + service_attachment: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified ServiceAttachment in the given + scope + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteServiceAttachmentRequest( + project="project_value", + region="region_value", + service_attachment="service_attachment_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ service_attachment (str): + Name of the ServiceAttachment + resource to delete. + + This corresponds to the ``service_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, service_attachment]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteServiceAttachmentRequest): + request = compute.DeleteServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if service_attachment is not None: + request.service_attachment = service_attachment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("service_attachment", request.service_attachment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + service_attachment: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.ServiceAttachment: + r"""Returns the specified ServiceAttachment resource in + the given scope. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetServiceAttachmentRequest( + project="project_value", + region="region_value", + service_attachment="service_attachment_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_attachment (str): + Name of the ServiceAttachment + resource to return. + + This corresponds to the ``service_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.ServiceAttachment: + Represents a ServiceAttachment + resource. 
A service attachment + represents a service that a producer has + exposed. It encapsulates the load + balancer which fronts the service runs + and a list of NAT IP ranges that the + producers uses to represent the + consumers connecting to the service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, service_attachment]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetServiceAttachmentRequest): + request = compute.GetServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if service_attachment is not None: + request.service_attachment = service_attachment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("service_attachment", request.service_attachment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicyServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyServiceAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicyServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicyServiceAttachmentRequest): + request = compute.GetIamPolicyServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + service_attachment_resource: Optional[compute.ServiceAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a ServiceAttachment in the specified project + in the given scope using the parameters that are + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertServiceAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_attachment_resource (google.cloud.compute_v1.types.ServiceAttachment): + The body resource for this request + This corresponds to the ``service_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, service_attachment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertServiceAttachmentRequest): + request = compute.InsertServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if service_attachment_resource is not None: + request.service_attachment_resource = service_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + service_attachment_resource: Optional[compute.ServiceAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a ServiceAttachment in the specified project + in the given scope using the parameters that are + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertServiceAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ service_attachment_resource (google.cloud.compute_v1.types.ServiceAttachment): + The body resource for this request + This corresponds to the ``service_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, service_attachment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertServiceAttachmentRequest): + request = compute.InsertServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if service_attachment_resource is not None: + request.service_attachment_resource = service_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListServiceAttachmentsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists the ServiceAttachments for a project in the + given scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.ListServiceAttachmentsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListServiceAttachmentsRequest, dict]): + The request object. A request message for + ServiceAttachments.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.service_attachments.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListServiceAttachmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListServiceAttachmentsRequest): + request = compute.ListServiceAttachmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch_unary(self, + request: Optional[Union[compute.PatchServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + service_attachment: Optional[str] = None, + service_attachment_resource: Optional[compute.ServiceAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified ServiceAttachment resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchServiceAttachmentRequest( + project="project_value", + region="region_value", + service_attachment="service_attachment_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str): + The region scoping this request and + should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_attachment (str): + The resource id of the + ServiceAttachment to patch. It should + conform to RFC1035 resource name or be a + string form on an unsigned long number. + + This corresponds to the ``service_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_attachment_resource (google.cloud.compute_v1.types.ServiceAttachment): + The body resource for this request + This corresponds to the ``service_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, service_attachment, service_attachment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.PatchServiceAttachmentRequest): + request = compute.PatchServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if service_attachment is not None: + request.service_attachment = service_attachment + if service_attachment_resource is not None: + request.service_attachment_resource = service_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("service_attachment", request.service_attachment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + service_attachment: Optional[str] = None, + service_attachment_resource: Optional[compute.ServiceAttachment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified ServiceAttachment resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchServiceAttachmentRequest( + project="project_value", + region="region_value", + service_attachment="service_attachment_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region scoping this request and + should conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_attachment (str): + The resource id of the + ServiceAttachment to patch. It should + conform to RFC1035 resource name or be a + string form on an unsigned long number. + + This corresponds to the ``service_attachment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_attachment_resource (google.cloud.compute_v1.types.ServiceAttachment): + The body resource for this request + This corresponds to the ``service_attachment_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, service_attachment, service_attachment_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchServiceAttachmentRequest): + request = compute.PatchServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if service_attachment is not None: + request.service_attachment = service_attachment + if service_attachment_resource is not None: + request.service_attachment_resource = service_attachment_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("service_attachment", request.service_attachment), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicyServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[compute.RegionSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyServiceAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicyServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicyServiceAttachmentRequest): + request = compute.SetIamPolicyServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = region_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsServiceAttachmentRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsServiceAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsServiceAttachmentRequest, dict]): + The request object. A request message for + ServiceAttachments.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsServiceAttachmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsServiceAttachmentRequest): + request = compute.TestIamPermissionsServiceAttachmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "ServiceAttachmentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ServiceAttachmentsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/pagers.py new file mode 100644 index 000000000..2e56f2f8c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ServiceAttachmentAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ServiceAttachmentAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ServiceAttachmentAggregatedList], + request: compute.AggregatedListServiceAttachmentsRequest, + response: compute.ServiceAttachmentAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListServiceAttachmentsRequest): + The initial request object. + response (google.cloud.compute_v1.types.ServiceAttachmentAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListServiceAttachmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ServiceAttachmentAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.ServiceAttachmentsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.ServiceAttachmentsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ServiceAttachmentList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ServiceAttachmentList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.ServiceAttachmentList], + request: compute.ListServiceAttachmentsRequest, + response: compute.ServiceAttachmentList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListServiceAttachmentsRequest): + The initial request object. + response (google.cloud.compute_v1.types.ServiceAttachmentList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListServiceAttachmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ServiceAttachmentList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.ServiceAttachment]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/__init__.py new file mode 100644 index 000000000..6ea9620ca --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ServiceAttachmentsTransport +from .rest import ServiceAttachmentsRestTransport +from .rest import ServiceAttachmentsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ServiceAttachmentsTransport]] +_transport_registry['rest'] = ServiceAttachmentsRestTransport + +__all__ = ( + 'ServiceAttachmentsTransport', + 'ServiceAttachmentsRestTransport', + 'ServiceAttachmentsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/base.py new file mode 100644 index 000000000..d0e735b26 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.compute_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.compute_v1.types import compute
+from google.cloud.compute_v1.services import region_operations
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class ServiceAttachmentsTransport(abc.ABC):
+    """Abstract transport class for ServiceAttachments."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/compute',
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'compute.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListServiceAttachmentsRequest], + Union[ + compute.ServiceAttachmentAggregatedList, + Awaitable[compute.ServiceAttachmentAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteServiceAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetServiceAttachmentRequest], + Union[ + compute.ServiceAttachment, + Awaitable[compute.ServiceAttachment] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyServiceAttachmentRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertServiceAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListServiceAttachmentsRequest], + Union[ + compute.ServiceAttachmentList, + Awaitable[compute.ServiceAttachmentList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchServiceAttachmentRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyServiceAttachmentRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsServiceAttachmentRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def 
_region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'ServiceAttachmentsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/rest.py new file mode 100644 index 000000000..4dfba32e0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/service_attachments/transports/rest.py @@ -0,0 +1,1348 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ServiceAttachmentsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ServiceAttachmentsRestInterceptor: + """Interceptor for ServiceAttachments. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ServiceAttachmentsRestTransport. + + .. 
code-block:: python + class MyCustomServiceAttachmentsInterceptor(ServiceAttachmentsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ServiceAttachmentsRestTransport(interceptor=MyCustomServiceAttachmentsInterceptor()) + client = ServiceAttachmentsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListServiceAttachmentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListServiceAttachmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.ServiceAttachmentAggregatedList) -> compute.ServiceAttachmentAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteServiceAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteServiceAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetServiceAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetServiceAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. + """ + return request, metadata + + def post_get(self, response: compute.ServiceAttachment) -> compute.ServiceAttachment: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicyServiceAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicyServiceAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertServiceAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertServiceAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListServiceAttachmentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListServiceAttachmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. + """ + return request, metadata + + def post_list(self, response: compute.ServiceAttachmentList) -> compute.ServiceAttachmentList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchServiceAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchServiceAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicyServiceAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicyServiceAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsServiceAttachmentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsServiceAttachmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ServiceAttachments server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the ServiceAttachments server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ServiceAttachmentsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ServiceAttachmentsRestInterceptor + + +class ServiceAttachmentsRestTransport(ServiceAttachmentsTransport): + """REST backend transport for ServiceAttachments. + + The ServiceAttachments API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ServiceAttachmentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ServiceAttachmentsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AggregatedList(ServiceAttachmentsRestStub):
+        def __hash__(self):
+            return hash("AggregatedList")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.AggregatedListServiceAttachmentsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+
timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ServiceAttachmentAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListServiceAttachmentsRequest): + The request object. A request message for + ServiceAttachments.AggregatedList. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ServiceAttachmentAggregatedList: + Contains a list of + ServiceAttachmentsScopedList. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/serviceAttachments', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListServiceAttachmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+        if response.status_code >= 400:
+            raise core_exceptions.from_http_response(response)
+
+        # Return the response
+        resp = compute.ServiceAttachmentAggregatedList()
+        pb_resp = compute.ServiceAttachmentAggregatedList.pb(resp)
+
+        json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+        resp = self._interceptor.post_aggregated_list(resp)
+        return resp
+
+    class _Delete(ServiceAttachmentsRestStub):
+        def __hash__(self):
+            return hash("Delete")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.DeleteServiceAttachmentRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the delete method over HTTP.
+
+            Args:
+                request (~.compute.DeleteServiceAttachmentRequest):
+                    The request object. A request message for
+                    ServiceAttachments.Delete. See the
+                    method description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                has three Operation resources: \*
+                `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                \*
+                `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                \*
+                `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                You can use an operation resource to manage asynchronous
+                API requests. For more information, read Handling API
+                responses. Operations can be global, regional or zonal.
+                - For global operations, use the ``globalOperations``
+                resource. - For regional operations, use the
+                ``regionOperations`` resource. - For zonal operations,
+                use the ``zonalOperations`` resource.
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteServiceAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(ServiceAttachmentsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetServiceAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ServiceAttachment: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetServiceAttachmentRequest): + The request object. A request message for + ServiceAttachments.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ServiceAttachment: + Represents a ServiceAttachment + resource. A service attachment + represents a service that a producer has + exposed. It encapsulates the load + balancer which fronts the service runs + and a list of NAT IP ranges that the + producers uses to represent the + consumers connecting to the service. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetServiceAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ServiceAttachment() + pb_resp = compute.ServiceAttachment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(ServiceAttachmentsRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicyServiceAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyServiceAttachmentRequest): + The request object. A request message for + ServiceAttachments.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyServiceAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(ServiceAttachmentsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertServiceAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertServiceAttachmentRequest): + The request object. A request message for + ServiceAttachments.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/serviceAttachments', + 'body': 'service_attachment_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertServiceAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(ServiceAttachmentsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListServiceAttachmentsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.ServiceAttachmentList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListServiceAttachmentsRequest): + The request object. A request message for + ServiceAttachments.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.ServiceAttachmentList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/serviceAttachments', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListServiceAttachmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ServiceAttachmentList() + pb_resp = compute.ServiceAttachmentList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(ServiceAttachmentsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchServiceAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchServiceAttachmentRequest): + The request object. A request message for + ServiceAttachments.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}', + 'body': 'service_attachment_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchServiceAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetIamPolicy(ServiceAttachmentsRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicyServiceAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyServiceAttachmentRequest): + The request object. A request message for + ServiceAttachments.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/setIamPolicy', + 'body': 'region_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyServiceAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(ServiceAttachmentsRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsServiceAttachmentRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsServiceAttachmentRequest): + The request object. A request message for + ServiceAttachments.TestIamPermissions. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsServiceAttachmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListServiceAttachmentsRequest], + compute.ServiceAttachmentAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteServiceAttachmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetServiceAttachmentRequest], + compute.ServiceAttachment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicyServiceAttachmentRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertServiceAttachmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListServiceAttachmentsRequest], + compute.ServiceAttachmentList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchServiceAttachmentRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicyServiceAttachmentRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsServiceAttachmentRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ServiceAttachmentsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/__init__.py new file mode 100644 index 000000000..0706c0f42 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import SnapshotsClient + +__all__ = ( + 'SnapshotsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/client.py new file mode 100644 index 000000000..6f00ad1a2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/client.py @@ -0,0 +1,1825 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import os
import re
from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast

from google.cloud.compute_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.cloud.compute_v1.services.snapshots import pagers
from google.cloud.compute_v1.types import compute
from .transports.base import SnapshotsTransport, DEFAULT_CLIENT_INFO
from .transports.rest import SnapshotsRestTransport


class SnapshotsClientMeta(type):
    """Metaclass for the Snapshots client.

    Provides class-level methods for building and retrieving support
    objects (e.g. transport) without polluting the client instance
    objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[SnapshotsTransport]]
    _transport_registry["rest"] = SnapshotsRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[SnapshotsTransport]:
        """Return an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # A specifically requested transport wins outright.
        if label:
            return cls._transport_registry[label]

        # Otherwise fall back to the first registered transport.
        return next(iter(cls._transport_registry.values()))


class SnapshotsClient(metaclass=SnapshotsClientMeta):
    """The Snapshots API."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Convert an API endpoint into its mTLS equivalent.

        "*.sandbox.googleapis.com" and "*.googleapis.com" become
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com"
        respectively.

        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # NOTE(review): the named-group names were stripped in the dumped
        # source ("(?P[^.]+)"); restored here to match the
        # ``m.groups()`` unpacking below.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        # Already mTLS, or not a googleapis.com host: leave untouched.
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "compute.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Create an instance of this client from parsed service-account info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            SnapshotsClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Create an instance of this client from a service-account key file.

        Args:
            filename (str): Path to the service account private key JSON file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            SnapshotsClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    # Historical alias kept for backward compatibility.
    from_service_account_json = from_service_account_file

    @property
    def transport(self) -> SnapshotsTransport:
        """Return the transport used by this client instance.

        Returns:
            SnapshotsTransport: The transport used by the client instance.
        """
        return self._transport

    @staticmethod
    def common_billing_account_path(billing_account: str) -> str:
        """Return a fully-qualified billing_account string."""
        return "billingAccounts/{billing_account}".format(billing_account=billing_account)

    @staticmethod
    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
        """Parse a billing_account path into its component segments."""
        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_folder_path(folder: str) -> str:
        """Return a fully-qualified folder string."""
        return "folders/{folder}".format(folder=folder)

    @staticmethod
    def parse_common_folder_path(path: str) -> Dict[str, str]:
        """Parse a folder path into its component segments."""
        m = re.match(r"^folders/(?P<folder>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_organization_path(organization: str) -> str:
        """Return a fully-qualified organization string."""
        return "organizations/{organization}".format(organization=organization)

    @staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parse an organization path into its component segments."""
        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_project_path(project: str) -> str:
        """Return a fully-qualified project string."""
        return "projects/{project}".format(project=project)

    @staticmethod
    def parse_common_project_path(path: str) -> Dict[str, str]:
        """Parse a project path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_location_path(project: str, location: str) -> str:
        """Return a fully-qualified location string."""
        return "projects/{project}/locations/{location}".format(project=project, location=location)

    @staticmethod
    def parse_common_location_path(path: str) -> Dict[str, str]:
        """Parse a location path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}

    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is chosen as follows: (1) None unless the
        `GOOGLE_API_USE_CLIENT_CERTIFICATE` env var is "true"; (2) else the
        `client_options.client_cert_source` if provided, falling back to the
        default client cert source if one exists.

        The endpoint is chosen as follows: (1) `client_options.api_endpoint`
        if provided; (2) else the default mTLS endpoint when
        `GOOGLE_API_USE_MTLS_ENDPOINT` is "always" or when it is "auto" and a
        client cert source exists; (3) otherwise the default endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions):
                Custom options for the client. Only ``api_endpoint`` and
                ``client_cert_source`` are consulted here.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: the API endpoint
                and the client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, SnapshotsTransport]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiate the snapshots client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]):
                Credentials identifying the application to the service; if
                none are given, they are ascertained from the environment.
            transport (Union[str, SnapshotsTransport]): The transport to use.
                If None, a transport is chosen automatically. NOTE: "rest"
                transport functionality is currently in a beta state
                (preview).
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client; ignored if a ``transport``
                instance is provided. ``api_endpoint`` overrides the default
                endpoint (also overridable via the
                GOOGLE_API_USE_MTLS_ENDPOINT env var: "always"/"never"/
                "auto"); ``client_cert_source`` supplies a client certificate
                for mutual TLS when GOOGLE_API_USE_CLIENT_CERTIFICATE is
                "true".
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                Client info used to send a user-agent string with API
                requests; defaults are used if ``None``.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS
                transport creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport. Accepting a ready-made
        # transport instance is an extensibility point for unusual setups.
        if isinstance(transport, SnapshotsTransport):
            # A transport instance carries its own credentials and scopes;
            # supplying them separately as well is ambiguous.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def delete_unary(self,
            request: Optional[Union[compute.DeleteSnapshotRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            snapshot: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Delete the specified Snapshot resource, returning the raw
        Operation (no extended-operation polling wrapper).

        Deleting a single snapshot might not delete all its data: data
        needed by subsequent snapshots is moved to the next corresponding
        snapshot. See "Deleting snapshots" in the product docs.

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteSnapshotRequest, dict]):
                The request message; mutually exclusive with the flattened
                field arguments below.
            project (str): Project ID for this request.
            snapshot (str): Name of the Snapshot resource to delete.
            retry (google.api_core.retry.Retry): Which errors, if any, to retry.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Extra request metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation: the
                long-running operation protobuf.

        Raises:
            ValueError: If ``request`` and any flattened argument are both set.
        """
        # A request object and flattened kwargs are mutually exclusive.
        if request is not None and any([project, snapshot]):
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Avoid copying when the caller already passed the proper type;
        # flattened fields are only applied to a freshly-built request.
        if not isinstance(request, compute.DeleteSnapshotRequest):
            request = compute.DeleteSnapshotRequest(request)
            if project is not None:
                request.project = project
            if snapshot is not None:
                request.snapshot = snapshot

        # The wrapped method carries retry/timeout defaults and error mapping.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Routing fields must travel in the metadata header.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("snapshot", request.snapshot),
            )),
        )

        return rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    def delete(self,
            request: Optional[Union[compute.DeleteSnapshotRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            snapshot: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Delete the specified Snapshot resource.

        Same semantics as :meth:`delete_unary`, but the returned object is
        an :class:`~google.api_core.extended_operation.ExtendedOperation`
        that can poll the global operations service for completion.

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteSnapshotRequest, dict]):
                The request message; mutually exclusive with the flattened
                field arguments below.
            project (str): Project ID for this request.
            snapshot (str): Name of the Snapshot resource to delete.
            retry (google.api_core.retry.Retry): Which errors, if any, to retry.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Extra request metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation: an object
                representing the extended long-running operation.

        Raises:
            ValueError: If ``request`` and any flattened argument are both set.
        """
        if request is not None and any([project, snapshot]):
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        if not isinstance(request, compute.DeleteSnapshotRequest):
            request = compute.DeleteSnapshotRequest(request)
            if project is not None:
                request.project = project
            if snapshot is not None:
                request.snapshot = snapshot

        rpc = self._transport._wrapped_methods[self._transport.delete]

        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("snapshot", request.snapshot),
            )),
        )

        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the Operation so callers can poll it via the global
        # operations service.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Implementation detail: maps compute's error field names onto the
        # uniform interface expected by ExtendedOperation.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        return response

    def get(self,
            request: Optional[Union[compute.GetSnapshotRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            snapshot: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Snapshot:
        r"""Return the specified Snapshot resource.

        Args:
            request (Union[google.cloud.compute_v1.types.GetSnapshotRequest, dict]):
                The request message; mutually exclusive with the flattened
                field arguments below.
            project (str): Project ID for this request.
            snapshot (str): Name of the Snapshot resource to return.
            retry (google.api_core.retry.Retry): Which errors, if any, to retry.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Extra request metadata.

        Returns:
            google.cloud.compute_v1.types.Snapshot: the Persistent Disk
                Snapshot resource.

        Raises:
            ValueError: If ``request`` and any flattened argument are both set.
        """
        if request is not None and any([project, snapshot]):
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        if not isinstance(request, compute.GetSnapshotRequest):
            request = compute.GetSnapshotRequest(request)
            if project is not None:
                request.project = project
            if snapshot is not None:
                request.snapshot = snapshot

        rpc = self._transport._wrapped_methods[self._transport.get]

        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("snapshot", request.snapshot),
            )),
        )

        return rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    def get_iam_policy(self,
            request: Optional[Union[compute.GetIamPolicySnapshotRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            resource: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Policy:
        r"""Get the access control policy for a resource.

        The policy may be empty if no such policy or resource exists.

        Args:
            request (Union[google.cloud.compute_v1.types.GetIamPolicySnapshotRequest, dict]):
                The request message; mutually exclusive with the flattened
                field arguments below.
            project (str): Project ID for this request.
            resource (str): Name or id of the resource for this request.
            retry (google.api_core.retry.Retry): Which errors, if any, to retry.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Extra request metadata.

        Returns:
            google.cloud.compute_v1.types.Policy: the IAM policy — a
                collection of bindings of principals to roles, optionally
                guarded by conditions. See
                https://cloud.google.com/iam/help/conditions/resource-policies.

        Raises:
            ValueError: If ``request`` and any flattened argument are both set.
        """
        if request is not None and any([project, resource]):
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        if not isinstance(request, compute.GetIamPolicySnapshotRequest):
            request = compute.GetIamPolicySnapshotRequest(request)
            if project is not None:
                request.project = project
            if resource is not None:
                request.resource = resource

        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]

        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("resource", request.resource),
            )),
        )

        return rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    def insert_unary(self,
            request: Optional[Union[compute.InsertSnapshotRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            snapshot_resource: Optional[compute.Snapshot] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Create a snapshot in the specified project, returning the raw
        Operation (no extended-operation polling wrapper).

        For regular snapshot creation, prefer this over
        disks.createSnapshot: it supports more features, such as creating
        snapshots in a project different from the source disk project.

        Args:
            request (Union[google.cloud.compute_v1.types.InsertSnapshotRequest, dict]):
                The request message; mutually exclusive with the flattened
                field arguments below.
            project (str): Project ID for this request.
            snapshot_resource (google.cloud.compute_v1.types.Snapshot):
                The body resource for this request.
            retry (google.api_core.retry.Retry): Which errors, if any, to retry.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Extra request metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation: the
                long-running operation protobuf.

        Raises:
            ValueError: If ``request`` and any flattened argument are both set.
        """
        if request is not None and any([project, snapshot_resource]):
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        if not isinstance(request, compute.InsertSnapshotRequest):
            request = compute.InsertSnapshotRequest(request)
            if project is not None:
                request.project = project
            if snapshot_resource is not None:
                request.snapshot_resource = snapshot_resource

        rpc = self._transport._wrapped_methods[self._transport.insert]

        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        return rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    def insert(self,
            request: Optional[Union[compute.InsertSnapshotRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            snapshot_resource: Optional[compute.Snapshot] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Create a snapshot in the specified project.

        Same semantics as :meth:`insert_unary`, but the returned object is
        an :class:`~google.api_core.extended_operation.ExtendedOperation`
        that can poll the global operations service for completion.

        Args:
            request (Union[google.cloud.compute_v1.types.InsertSnapshotRequest, dict]):
                The request message; mutually exclusive with the flattened
                field arguments below.
            project (str): Project ID for this request.
            snapshot_resource (google.cloud.compute_v1.types.Snapshot):
                The body resource for this request.
            retry (google.api_core.retry.Retry): Which errors, if any, to retry.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Extra request metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation: an object
                representing the extended long-running operation.

        Raises:
            ValueError: If ``request`` and any flattened argument are both set.
        """
        if request is not None and any([project, snapshot_resource]):
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        if not isinstance(request, compute.InsertSnapshotRequest):
            request = compute.InsertSnapshotRequest(request)
            if project is not None:
                request.project = project
            if snapshot_resource is not None:
                request.snapshot_resource = snapshot_resource

        rpc = self._transport._wrapped_methods[self._transport.insert]

        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # NOTE(review): the source dump was truncated mid-method here; the
        # tail below is reconstructed to mirror delete()'s identical
        # generated pattern — confirm against the generator output.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        return response
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListSnapshotsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of Snapshot resources contained + within the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListSnapshotsRequest, dict]): + The request object. A request message for Snapshots.List. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.snapshots.pagers.ListPager: + Contains a list of Snapshot + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListSnapshotsRequest): + request = compute.ListSnapshotsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicySnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_policy_request_resource: Optional[compute.GlobalSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicySnapshotRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicySnapshotRequest, dict]): + The request object. A request message for + Snapshots.SetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + This corresponds to the ``global_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicySnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetIamPolicySnapshotRequest): + request = compute.SetIamPolicySnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_policy_request_resource is not None: + request.global_set_policy_request_resource = global_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on a snapshot. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsSnapshotRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsSnapshotRequest, dict]): + The request object. A request message for + Snapshots.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsSnapshotRequest): + request = compute.SetLabelsSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + global_set_labels_request_resource: Optional[compute.GlobalSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on a snapshot. 
To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsSnapshotRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsSnapshotRequest, dict]): + The request object. A request message for + Snapshots.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + This corresponds to the ``global_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, resource, global_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsSnapshotRequest): + request = compute.SetLabelsSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if global_set_labels_request_resource is not None: + request.global_set_labels_request_resource = global_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsSnapshotRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsSnapshotRequest, dict]): + The request object. A request message for + Snapshots.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsSnapshotRequest): + request = compute.TestIamPermissionsSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SnapshotsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "SnapshotsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/pagers.py new file mode 100644 index 000000000..4fea7430e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SnapshotList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SnapshotList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.SnapshotList], + request: compute.ListSnapshotsRequest, + response: compute.SnapshotList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListSnapshotsRequest): + The initial request object. + response (google.cloud.compute_v1.types.SnapshotList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SnapshotList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Snapshot]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/__init__.py new file mode 100644 index 000000000..f833dd316 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SnapshotsTransport +from .rest import SnapshotsRestTransport +from .rest import SnapshotsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SnapshotsTransport]] +_transport_registry['rest'] = SnapshotsRestTransport + +__all__ = ( + 'SnapshotsTransport', + 'SnapshotsRestTransport', + 'SnapshotsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/base.py new file mode 100644 index 000000000..a50146502 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/base.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class SnapshotsTransport(abc.ABC): + """Abstract transport class for Snapshots.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteSnapshotRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetSnapshotRequest], + Union[ + compute.Snapshot, + Awaitable[compute.Snapshot] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicySnapshotRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertSnapshotRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListSnapshotsRequest], + Union[ + compute.SnapshotList, + Awaitable[compute.SnapshotList] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicySnapshotRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsSnapshotRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsSnapshotRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + 
return ex_op_service + + +__all__ = ( + 'SnapshotsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/rest.py new file mode 100644 index 000000000..62c95a862 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/snapshots/transports/rest.py @@ -0,0 +1,1233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import SnapshotsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SnapshotsRestInterceptor: + """Interceptor for Snapshots. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SnapshotsRestTransport. + + .. 
code-block:: python + class MyCustomSnapshotsInterceptor(SnapshotsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SnapshotsRestTransport(interceptor=MyCustomSnapshotsInterceptor()) + client 
= SnapshotsClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_get(self, response: compute.Snapshot) -> compute.Snapshot: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicySnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicySnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListSnapshotsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListSnapshotsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_list(self, response: compute.SnapshotList) -> compute.SnapshotList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicySnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicySnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. 
+ """ + return response + def pre_set_labels(self, request: compute.SetLabelsSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SnapshotsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SnapshotsRestInterceptor + + +class SnapshotsRestTransport(SnapshotsTransport): + """REST backend transport for Snapshots. + + The Snapshots API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[SnapshotsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SnapshotsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(SnapshotsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteSnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteSnapshotRequest): + The request object. 
A request message for + Snapshots.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/snapshots/{snapshot}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the 
appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(SnapshotsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetSnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Snapshot: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetSnapshotRequest): + The request object. A request message for Snapshots.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Snapshot: + Represents a Persistent Disk Snapshot + resource. You can use snapshots to back + up data on a regular interval. For more + information, read Creating persistent + disk snapshots. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/snapshots/{snapshot}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Snapshot() + pb_resp = compute.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(SnapshotsRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicySnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicySnapshotRequest): + The request object. A request message for + Snapshots.GetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/snapshots/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicySnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(SnapshotsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertSnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertSnapshotRequest): + The request object. A request message for + Snapshots.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/snapshots', + 'body': 'snapshot_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(SnapshotsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListSnapshotsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SnapshotList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListSnapshotsRequest): + The request object. A request message for Snapshots.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SnapshotList: + Contains a list of Snapshot + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/snapshots', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SnapshotList() + pb_resp = compute.SnapshotList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetIamPolicy(SnapshotsRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicySnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicySnapshotRequest): + The request object. A request message for + Snapshots.SetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/snapshots/{resource}/setIamPolicy', + 'body': 'global_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicySnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _SetLabels(SnapshotsRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsSnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsSnapshotRequest): + The request object. A request message for + Snapshots.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/snapshots/{resource}/setLabels', + 'body': 'global_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _TestIamPermissions(SnapshotsRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsSnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsSnapshotRequest): + The request object. A request message for + Snapshots.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/snapshots/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteSnapshotRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetSnapshotRequest], + compute.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicySnapshotRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertSnapshotRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListSnapshotsRequest], + compute.SnapshotList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicySnapshotRequest], + compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsSnapshotRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsSnapshotRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'SnapshotsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/__init__.py new file mode 100644 index 000000000..5b48aa4ec --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import SslCertificatesClient + +__all__ = ( + 'SslCertificatesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/client.py new file mode 100644 index 000000000..583c5e006 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/client.py @@ -0,0 +1,1237 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.ssl_certificates import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import SslCertificatesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import SslCertificatesRestTransport + + +class SslCertificatesClientMeta(type): + """Metaclass for the SslCertificates client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[SslCertificatesTransport]] + _transport_registry["rest"] = SslCertificatesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[SslCertificatesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SslCertificatesClient(metaclass=SslCertificatesClientMeta): + """The SslCertificates API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SslCertificatesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SslCertificatesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SslCertificatesTransport: + """Returns the transport used by the client instance. + + Returns: + SslCertificatesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified 
organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SslCertificatesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the ssl certificates client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SslCertificatesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SslCertificatesTransport): + # transport is a SslCertificatesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListSslCertificatesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all SslCertificate resources, + regional and global, available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListSslCertificatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListSslCertificatesRequest, dict]): + The request object. A request message for + SslCertificates.AggregatedList. See the + method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.ssl_certificates.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListSslCertificatesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListSslCertificatesRequest): + request = compute.AggregatedListSslCertificatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_certificate: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified SslCertificate resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSslCertificateRequest( + project="project_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSslCertificateRequest, dict]): + The request object. A request message for + SslCertificates.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_certificate (str): + Name of the SslCertificate resource + to delete. + + This corresponds to the ``ssl_certificate`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, ssl_certificate]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSslCertificateRequest): + request = compute.DeleteSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_certificate is not None: + request.ssl_certificate = ssl_certificate + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("ssl_certificate", request.ssl_certificate), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_certificate: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified SslCertificate resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSslCertificateRequest( + project="project_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSslCertificateRequest, dict]): + The request object. A request message for + SslCertificates.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_certificate (str): + Name of the SslCertificate resource + to delete. + + This corresponds to the ``ssl_certificate`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, ssl_certificate]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSslCertificateRequest): + request = compute.DeleteSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_certificate is not None: + request.ssl_certificate = ssl_certificate + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("ssl_certificate", request.ssl_certificate), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_certificate: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SslCertificate: + r"""Returns the specified SslCertificate resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.GetSslCertificateRequest( + project="project_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetSslCertificateRequest, dict]): + The request object. A request message for + SslCertificates.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ ssl_certificate (str): + Name of the SslCertificate resource + to return. + + This corresponds to the ``ssl_certificate`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SslCertificate: + Represents an SSL Certificate resource. Google Compute + Engine has two SSL Certificate resources: \* + [Global](/compute/docs/reference/rest/v1/sslCertificates) + \* + [Regional](/compute/docs/reference/rest/v1/regionSslCertificates) + The sslCertificates are used by: - external HTTPS load + balancers - SSL proxy load balancers The + regionSslCertificates are used by internal HTTPS load + balancers. Optionally, certificate file contents that + you upload can contain a set of up to five PEM-encoded + certificates. The API call creates an object + (sslCertificate) that holds this data. You can use SSL + keys and certificates to secure connections to a load + balancer. For more information, read Creating and using + SSL certificates, SSL certificates quotas and limits, + and Troubleshooting SSL certificates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, ssl_certificate]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetSslCertificateRequest): + request = compute.GetSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_certificate is not None: + request.ssl_certificate = ssl_certificate + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("ssl_certificate", request.ssl_certificate), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_certificate_resource: Optional[compute.SslCertificate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a SslCertificate resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSslCertificateRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertSslCertificateRequest, dict]): + The request object. A request message for + SslCertificates.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): + The body resource for this request + This corresponds to the ``ssl_certificate_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, ssl_certificate_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertSslCertificateRequest): + request = compute.InsertSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_certificate_resource is not None: + request.ssl_certificate_resource = ssl_certificate_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertSslCertificateRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_certificate_resource: Optional[compute.SslCertificate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a SslCertificate resource in the specified + project using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSslCertificateRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertSslCertificateRequest, dict]): + The request object. A request message for + SslCertificates.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): + The body resource for this request + This corresponds to the ``ssl_certificate_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, ssl_certificate_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertSslCertificateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertSslCertificateRequest): + request = compute.InsertSslCertificateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_certificate_resource is not None: + request.ssl_certificate_resource = ssl_certificate_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListSslCertificatesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of SslCertificate resources + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.ListSslCertificatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListSslCertificatesRequest, dict]): + The request object. A request message for + SslCertificates.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.ssl_certificates.pagers.ListPager: + Contains a list of SslCertificate + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListSslCertificatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListSslCertificatesRequest): + request = compute.ListSslCertificatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SslCertificatesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "SslCertificatesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/pagers.py new file mode 100644 index 000000000..5da54b9e8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SslCertificateAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SslCertificateAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.SslCertificateAggregatedList], + request: compute.AggregatedListSslCertificatesRequest, + response: compute.SslCertificateAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListSslCertificatesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SslCertificateAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListSslCertificatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SslCertificateAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.SslCertificatesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.SslCertificatesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SslCertificateList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SslCertificateList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.SslCertificateList], + request: compute.ListSslCertificatesRequest, + response: compute.SslCertificateList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListSslCertificatesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SslCertificateList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListSslCertificatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SslCertificateList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.SslCertificate]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/__init__.py new file mode 100644 index 000000000..daa2108f1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SslCertificatesTransport +from .rest import SslCertificatesRestTransport +from .rest import SslCertificatesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SslCertificatesTransport]] +_transport_registry['rest'] = SslCertificatesRestTransport + +__all__ = ( + 'SslCertificatesTransport', + 'SslCertificatesRestTransport', + 'SslCertificatesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/base.py new file mode 100644 index 000000000..05273e4d0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class SslCertificatesTransport(abc.ABC): + """Abstract transport class for SslCertificates.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListSslCertificatesRequest], + Union[ + compute.SslCertificateAggregatedList, + Awaitable[compute.SslCertificateAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteSslCertificateRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetSslCertificateRequest], + Union[ + compute.SslCertificate, + Awaitable[compute.SslCertificate] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertSslCertificateRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListSslCertificatesRequest], + Union[ + compute.SslCertificateList, + Awaitable[compute.SslCertificateList] + ]]: + raise 
NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'SslCertificatesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py new file mode 100644 index 000000000..2fa29268f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py @@ -0,0 +1,792 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import SslCertificatesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SslCertificatesRestInterceptor: + """Interceptor for SslCertificates. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SslCertificatesRestTransport. + + .. 
code-block:: python + class MyCustomSslCertificatesInterceptor(SslCertificatesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SslCertificatesRestTransport(interceptor=MyCustomSslCertificatesInterceptor()) + client = SslCertificatesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListSslCertificatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListSslCertificatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslCertificates server. 
+ """ + return request, metadata + + def post_aggregated_list(self, response: compute.SslCertificateAggregatedList) -> compute.SslCertificateAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the SslCertificates server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteSslCertificateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteSslCertificateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslCertificates server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the SslCertificates server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetSslCertificateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetSslCertificateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslCertificates server. + """ + return request, metadata + + def post_get(self, response: compute.SslCertificate) -> compute.SslCertificate: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the SslCertificates server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertSslCertificateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertSslCertificateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslCertificates server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the SslCertificates server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListSslCertificatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListSslCertificatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslCertificates server. + """ + return request, metadata + + def post_list(self, response: compute.SslCertificateList) -> compute.SslCertificateList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the SslCertificates server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SslCertificatesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SslCertificatesRestInterceptor + + +class SslCertificatesRestTransport(SslCertificatesTransport): + """REST backend transport for SslCertificates. + + The SslCertificates API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[SslCertificatesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SslCertificatesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(SslCertificatesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListSslCertificatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslCertificateAggregatedList: + r"""Call the aggregated list method over HTTP. 
+ + Args: + request (~.compute.AggregatedListSslCertificatesRequest): + The request object. A request message for + SslCertificates.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SslCertificateAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/sslCertificates', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListSslCertificatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslCertificateAggregatedList() + pb_resp = compute.SslCertificateAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(SslCertificatesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteSslCertificateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteSslCertificateRequest): + The request object. A request message for + SslCertificates.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteSslCertificateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
# --- tail of SslCertificatesRestTransport._Delete.__call__ (starts in an earlier chunk) ---
            # Raise a typed google.api_core exception for any HTTP error status.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Operation()
            pb_resp = compute.Operation.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_delete(resp)
            return resp

    class _Get(SslCertificatesRestStub):
        def __hash__(self):
            return hash("Get")

        # Required request fields and their proto default values; any that are
        # absent from the transcoded query params are re-added before sending.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetSslCertificateRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.SslCertificate:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetSslCertificateRequest):
                    The request object. A request message for
                    SslCertificates.Get. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.SslCertificate:
                    Represents an SSL Certificate resource. Google Compute
                    Engine has two SSL Certificate resources: \*
                    `Global <https://cloud.google.com/compute/docs/reference/rest/v1/sslCertificates>`__
                    \*
                    `Regional <https://cloud.google.com/compute/docs/reference/rest/v1/regionSslCertificates>`__
                    The sslCertificates are used by: - external HTTPS load
                    balancers - SSL proxy load balancers The
                    regionSslCertificates are used by internal HTTPS load
                    balancers. Optionally, certificate file contents that
                    you upload can contain a set of up to five PEM-encoded
                    certificates. The API call creates an object
                    (sslCertificate) that holds this data. You can use SSL
                    keys and certificates to secure connections to a load
                    balancer. For more information, read Creating and using
                    SSL certificates, SSL certificates quotas and limits,
                    and Troubleshooting SSL certificates.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}',
            },
            ]
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetSslCertificateRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.SslCertificate()
            pb_resp = compute.SslCertificate.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp

    class _Insert(SslCertificatesRestStub):
        def __hash__(self):
            return hash("Insert")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.InsertSslCertificateRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Operation:
            r"""Call the insert method over HTTP.

            Args:
                request (~.compute.InsertSslCertificateRequest):
                    The request object. A request message for
                    SslCertificates.Insert. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Operation:
                    Represents an Operation resource. Google Compute Engine
                    has three Operation resources: \*
                    `Global <https://cloud.google.com/compute/docs/reference/rest/v1/globalOperations>`__
                    \*
                    `Regional <https://cloud.google.com/compute/docs/reference/rest/v1/regionOperations>`__
                    \*
                    `Zonal <https://cloud.google.com/compute/docs/reference/rest/v1/zoneOperations>`__
                    You can use an operation resource to manage asynchronous
                    API requests. For more information, read Handling API
                    responses. Operations can be global, regional or zonal.
                    - For global operations, use the ``globalOperations``
                    resource. - For regional operations, use the
                    ``regionOperations`` resource. - For zonal operations,
                    use the ``zonalOperations`` resource. For more
                    information, read Global, Regional, and Zonal Resources.
            """
            # (the method body continues in the next chunk)
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/sslCertificates', + 'body': 'ssl_certificate_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertSslCertificateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(SslCertificatesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListSslCertificatesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslCertificateList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListSslCertificatesRequest): + The request object. A request message for + SslCertificates.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SslCertificateList: + Contains a list of SslCertificate + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/sslCertificates', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListSslCertificatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslCertificateList() + pb_resp = compute.SslCertificateList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListSslCertificatesRequest], + compute.SslCertificateAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteSslCertificateRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetSslCertificateRequest], + compute.SslCertificate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertSslCertificateRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListSslCertificatesRequest], + compute.SslCertificateList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'SslCertificatesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/__init__.py new file mode 100644 index 000000000..f64c9c93e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import SslPoliciesClient + +__all__ = ( + 'SslPoliciesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/client.py new file mode 100644 index 000000000..fc5d2a0a8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/client.py @@ -0,0 +1,1598 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# (continuation of the Apache 2.0 license header of client.py)
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import os
import re
from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast

from google.cloud.compute_v1 import gapic_version as package_version

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

# Older versions of google-api-core do not expose _MethodDefault; fall back to
# a plain `object` sentinel there.
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.api_core import extended_operation  # type: ignore  # (duplicate import kept as emitted by the generator)
from google.cloud.compute_v1.services.ssl_policies import pagers
from google.cloud.compute_v1.types import compute
from .transports.base import SslPoliciesTransport, DEFAULT_CLIENT_INFO
from .transports.rest import SslPoliciesRestTransport


class SslPoliciesClientMeta(type):
    """Metaclass for the SslPolicies client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[SslPoliciesTransport]]
    _transport_registry["rest"] = SslPoliciesRestTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
        ) -> Type[SslPoliciesTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class SslPoliciesClient(metaclass=SslPoliciesClientMeta):
    """The SslPolicies API."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # FIX: the named groups were stripped in transit ("(?P[^.]+)" is not a
        # valid pattern); restored from the unpacking below, which requires the
        # groups name/mtls/sandbox/googledomain.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "compute.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info."""
        # (the remainder of the docstring and the body continue in the next chunk)
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SslPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SslPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SslPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + SslPoliciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SslPoliciesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the ssl policies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SslPoliciesTransport]): The + transport to use. 
    # --- continuation of SslPoliciesClient.__init__ (docstring, then body) ---
        r"""(docstring continuation)
                If set to None, a transport is chosen
                automatically.
                NOTE: "rest" transport functionality is currently in a
                beta state (preview). We welcome your feedback via an
                issue in this library's source repository.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept either a dict or a ClientOptions.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        client_options = cast(client_options_lib.ClientOptions, client_options)

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, SslPoliciesTransport):
            # transport is a SslPoliciesTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def aggregated_list(self,
            request: Optional[Union[compute.AggregatedListSslPoliciesRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.AggregatedListPager:
        r"""Retrieves the list of all SslPolicy resources,
        regional and global, available to the specified project.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization."""
        # (the remainder of the docstring and the body continue in the next chunk)
    # --- continuation of SslPoliciesClient.aggregated_list (docstring, then body) ---
        r"""(docstring continuation)
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_aggregated_list():
                # Create a client
                client = compute_v1.SslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.AggregatedListSslPoliciesRequest(
                    project="project_value",
                )

                # Make the request
                page_result = client.aggregated_list(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AggregatedListSslPoliciesRequest, dict]):
                The request object. A request message for
                SslPolicies.AggregatedList. See the
                method description for details.
            project (str):
                Name of the project scoping this
                request.

                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.services.ssl_policies.pagers.AggregatedListPager:
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AggregatedListSslPoliciesRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AggregatedListSslPoliciesRequest):
            request = compute.AggregatedListSslPoliciesRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.aggregated_list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.AggregatedListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete_unary(self,
            request: Optional[Union[compute.DeleteSslPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            ssl_policy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Deletes the specified SSL policy. The SSL policy
        resource can be deleted only if it is not in use by any
        TargetHttpsProxy or TargetSslProxy resources.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization."""
        # (the remainder of the docstring and the body continue in the next chunk)
    # --- continuation of SslPoliciesClient.delete_unary (docstring, then body) ---
        r"""(docstring continuation)
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.SslPoliciesClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteSslPolicyRequest(
                    project="project_value",
                    ssl_policy="ssl_policy_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteSslPolicyRequest, dict]):
                The request object. A request message for
                SslPolicies.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            ssl_policy (str):
                Name of the SSL policy to delete. The
                name must be 1-63 characters long, and
                comply with RFC1035.

                This corresponds to the ``ssl_policy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, ssl_policy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteSslPolicyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteSslPolicyRequest):
            request = compute.DeleteSslPolicyRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if ssl_policy is not None:
            request.ssl_policy = ssl_policy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("ssl_policy", request.ssl_policy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete(self,
            request: Optional[Union[compute.DeleteSslPolicyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            ssl_policy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified SSL policy. The SSL policy
        resource can be deleted only if it is not in use by any
        TargetHttpsProxy or TargetSslProxy resources.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only."""
        # (the remainder of the docstring and the body continue in the next chunk)
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSslPolicyRequest( + project="project_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSslPolicyRequest, dict]): + The request object. A request message for + SslPolicies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy (str): + Name of the SSL policy to delete. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``ssl_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, ssl_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSslPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSslPolicyRequest): + request = compute.DeleteSslPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_policy is not None: + request.ssl_policy = ssl_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("ssl_policy", request.ssl_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetSslPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SslPolicy: + r"""Lists all of the ordered rules present in a single + specified policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetSslPolicyRequest( + project="project_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetSslPolicyRequest, dict]): + The request object. A request message for + SslPolicies.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ ssl_policy (str): + Name of the SSL policy to update. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``ssl_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SslPolicy: + Represents an SSL Policy resource. + Use SSL policies to control the SSL + features, such as versions and cipher + suites, offered by an HTTPS or SSL Proxy + load balancer. For more information, + read SSL Policy Concepts. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, ssl_policy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetSslPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetSslPolicyRequest): + request = compute.GetSslPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_policy is not None: + request.ssl_policy = ssl_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("ssl_policy", request.ssl_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertSslPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_policy_resource: Optional[compute.SslPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Returns the specified SSL policy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSslPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertSslPolicyRequest, dict]): + The request object. A request message for + SslPolicies.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + This corresponds to the ``ssl_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, ssl_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertSslPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertSslPolicyRequest): + request = compute.InsertSslPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_policy_resource is not None: + request.ssl_policy_resource = ssl_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertSslPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_policy_resource: Optional[compute.SslPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Returns the specified SSL policy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSslPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertSslPolicyRequest, dict]): + The request object. A request message for + SslPolicies.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + This corresponds to the ``ssl_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, ssl_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertSslPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertSslPolicyRequest): + request = compute.InsertSslPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_policy_resource is not None: + request.ssl_policy_resource = ssl_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListSslPoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists all the SSL policies that have been configured + for the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListSslPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListSslPoliciesRequest, dict]): + The request object. A request message for + SslPolicies.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.ssl_policies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListSslPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.ListSslPoliciesRequest): + request = compute.ListSslPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_available_features(self, + request: Optional[Union[compute.ListAvailableFeaturesSslPoliciesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.SslPoliciesListAvailableFeaturesResponse: + r"""Lists all features that can be specified in the SSL + policy when using custom profile. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_available_features(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListAvailableFeaturesSslPoliciesRequest( + project="project_value", + ) + + # Make the request + response = client.list_available_features(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListAvailableFeaturesSslPoliciesRequest, dict]): + The request object. A request message for + SslPolicies.ListAvailableFeatures. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.SslPoliciesListAvailableFeaturesResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListAvailableFeaturesSslPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.ListAvailableFeaturesSslPoliciesRequest): + request = compute.ListAvailableFeaturesSslPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_available_features] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchSslPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_policy: Optional[str] = None, + ssl_policy_resource: Optional[compute.SslPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified SSL policy with the data + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchSslPolicyRequest( + project="project_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchSslPolicyRequest, dict]): + The request object. A request message for + SslPolicies.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy (str): + Name of the SSL policy to update. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``ssl_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + This corresponds to the ``ssl_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, ssl_policy, ssl_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchSslPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchSslPolicyRequest): + request = compute.PatchSslPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_policy is not None: + request.ssl_policy = ssl_policy + if ssl_policy_resource is not None: + request.ssl_policy_resource = ssl_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("ssl_policy", request.ssl_policy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchSslPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + ssl_policy: Optional[str] = None, + ssl_policy_resource: Optional[compute.SslPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified SSL policy with the data + included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchSslPolicyRequest( + project="project_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchSslPolicyRequest, dict]): + The request object. A request message for + SslPolicies.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy (str): + Name of the SSL policy to update. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``ssl_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + This corresponds to the ``ssl_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, ssl_policy, ssl_policy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchSslPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchSslPolicyRequest): + request = compute.PatchSslPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if ssl_policy is not None: + request.ssl_policy = ssl_policy + if ssl_policy_resource is not None: + request.ssl_policy_resource = ssl_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("ssl_policy", request.ssl_policy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "SslPoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "SslPoliciesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/pagers.py new file mode 100644 index 000000000..dc4ad37d2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SslPoliciesAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SslPoliciesAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.SslPoliciesAggregatedList], + request: compute.AggregatedListSslPoliciesRequest, + response: compute.SslPoliciesAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListSslPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SslPoliciesAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListSslPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SslPoliciesAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.SslPoliciesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.SslPoliciesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SslPoliciesList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SslPoliciesList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.SslPoliciesList], + request: compute.ListSslPoliciesRequest, + response: compute.SslPoliciesList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListSslPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.SslPoliciesList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListSslPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SslPoliciesList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.SslPolicy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/__init__.py new file mode 100644 index 000000000..54d7121bc --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SslPoliciesTransport +from .rest import SslPoliciesRestTransport +from .rest import SslPoliciesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[SslPoliciesTransport]] +_transport_registry['rest'] = SslPoliciesRestTransport + +__all__ = ( + 'SslPoliciesTransport', + 'SslPoliciesRestTransport', + 'SslPoliciesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/base.py new file mode 100644 index 000000000..de584a752 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class SslPoliciesTransport(abc.ABC): + """Abstract transport class for SslPolicies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_available_features: gapic_v1.method.wrap_method( + self.list_available_features, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListSslPoliciesRequest], + Union[ + compute.SslPoliciesAggregatedList, + Awaitable[compute.SslPoliciesAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteSslPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetSslPolicyRequest], + Union[ + compute.SslPolicy, + Awaitable[compute.SslPolicy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertSslPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListSslPoliciesRequest], + Union[ + compute.SslPoliciesList, + Awaitable[compute.SslPoliciesList] + ]]: + raise NotImplementedError() + + @property + def list_available_features(self) -> Callable[ + [compute.ListAvailableFeaturesSslPoliciesRequest], + Union[ + compute.SslPoliciesListAvailableFeaturesResponse, + Awaitable[compute.SslPoliciesListAvailableFeaturesResponse] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchSslPolicyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'SslPoliciesTransport', +) diff --git 
a/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/rest.py new file mode 100644 index 000000000..43ec98efc --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/ssl_policies/transports/rest.py @@ -0,0 +1,1024 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import 
SslPoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SslPoliciesRestInterceptor: + """Interceptor for SslPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SslPoliciesRestTransport. + + .. code-block:: python + class MyCustomSslPoliciesInterceptor(SslPoliciesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_available_features(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_list_available_features(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SslPoliciesRestTransport(interceptor=MyCustomSslPoliciesInterceptor()) + client = SslPoliciesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListSslPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListSslPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslPolicies server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.SslPoliciesAggregatedList) -> compute.SslPoliciesAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the SslPolicies server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteSslPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteSslPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslPolicies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the SslPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetSslPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetSslPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslPolicies server. + """ + return request, metadata + + def post_get(self, response: compute.SslPolicy) -> compute.SslPolicy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the SslPolicies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertSslPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertSslPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslPolicies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the SslPolicies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListSslPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListSslPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslPolicies server. + """ + return request, metadata + + def post_list(self, response: compute.SslPoliciesList) -> compute.SslPoliciesList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the SslPolicies server but before + it is returned to user code. 
+ """ + return response + def pre_list_available_features(self, request: compute.ListAvailableFeaturesSslPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListAvailableFeaturesSslPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_available_features + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslPolicies server. + """ + return request, metadata + + def post_list_available_features(self, response: compute.SslPoliciesListAvailableFeaturesResponse) -> compute.SslPoliciesListAvailableFeaturesResponse: + """Post-rpc interceptor for list_available_features + + Override in a subclass to manipulate the response + after it is returned by the SslPolicies server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchSslPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchSslPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the SslPolicies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the SslPolicies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SslPoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SslPoliciesRestInterceptor + + +class SslPoliciesRestTransport(SslPoliciesTransport): + """REST backend transport for SslPolicies. + + The SslPolicies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[SslPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SslPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(SslPoliciesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListSslPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslPoliciesAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListSslPoliciesRequest): + The request object. A request message for + SslPolicies.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SslPoliciesAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/sslPolicies', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListSslPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslPoliciesAggregatedList() + pb_resp = compute.SslPoliciesAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(SslPoliciesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteSslPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteSslPolicyRequest): + The request object. A request message for + SslPolicies.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteSslPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(SslPoliciesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetSslPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslPolicy: + r"""Call the get method over HTTP. 
+ + Args: + request (~.compute.GetSslPolicyRequest): + The request object. A request message for + SslPolicies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SslPolicy: + Represents an SSL Policy resource. + Use SSL policies to control the SSL + features, such as versions and cipher + suites, offered by an HTTPS or SSL Proxy + load balancer. For more information, + read SSL Policy Concepts. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetSslPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslPolicy() + pb_resp = compute.SslPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(SslPoliciesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertSslPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertSslPolicyRequest): + The request object. A request message for + SslPolicies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/sslPolicies', + 'body': 'ssl_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertSslPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(SslPoliciesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListSslPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslPoliciesList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListSslPoliciesRequest): + The request object. A request message for + SslPolicies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SslPoliciesList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/sslPolicies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListSslPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslPoliciesList() + pb_resp = compute.SslPoliciesList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListAvailableFeatures(SslPoliciesRestStub): + def __hash__(self): + return hash("ListAvailableFeatures") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListAvailableFeaturesSslPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SslPoliciesListAvailableFeaturesResponse: + r"""Call the list available features method over HTTP. + + Args: + request (~.compute.ListAvailableFeaturesSslPoliciesRequest): + The request object. A request message for + SslPolicies.ListAvailableFeatures. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SslPoliciesListAvailableFeaturesResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/sslPolicies/listAvailableFeatures', + }, + ] + request, metadata = self._interceptor.pre_list_available_features(request, metadata) + pb_request = compute.ListAvailableFeaturesSslPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SslPoliciesListAvailableFeaturesResponse() + pb_resp = compute.SslPoliciesListAvailableFeaturesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_available_features(resp) + return resp + + class _Patch(SslPoliciesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchSslPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchSslPolicyRequest): + The request object. A request message for + SslPolicies.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}', + 'body': 'ssl_policy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchSslPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListSslPoliciesRequest], + compute.SslPoliciesAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteSslPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetSslPolicyRequest], + compute.SslPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertSslPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListSslPoliciesRequest], + compute.SslPoliciesList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_available_features(self) -> Callable[ + [compute.ListAvailableFeaturesSslPoliciesRequest], + compute.SslPoliciesListAvailableFeaturesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAvailableFeatures(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchSslPolicyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'SslPoliciesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/__init__.py new file mode 100644 index 000000000..9dbd4c1de --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import SubnetworksClient + +__all__ = ( + 'SubnetworksClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/client.py new file mode 100644 index 000000000..b0c74c8d4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/client.py @@ -0,0 +1,2753 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.subnetworks import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import SubnetworksTransport, DEFAULT_CLIENT_INFO +from .transports.rest import SubnetworksRestTransport + + +class SubnetworksClientMeta(type): + """Metaclass for the Subnetworks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[SubnetworksTransport]] + _transport_registry["rest"] = SubnetworksRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[SubnetworksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+ + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SubnetworksClient(metaclass=SubnetworksClientMeta): + """The Subnetworks API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubnetworksClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubnetworksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SubnetworksTransport: + """Returns the transport used by the client instance. + + Returns: + SubnetworksTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization 
string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SubnetworksTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the subnetworks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SubnetworksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SubnetworksTransport): + # transport is a SubnetworksTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListSubnetworksRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of subnetworks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListSubnetworksRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListSubnetworksRequest, dict]): + The request object. A request message for + Subnetworks.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.subnetworks.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListSubnetworksRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListSubnetworksRequest): + request = compute.AggregatedListSubnetworksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified subnetwork. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork (str): + Name of the Subnetwork resource to + delete. + + This corresponds to the ``subnetwork`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, subnetwork]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSubnetworkRequest): + request = compute.DeleteSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified subnetwork. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork (str): + Name of the Subnetwork resource to + delete. + + This corresponds to the ``subnetwork`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, subnetwork]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteSubnetworkRequest): + request = compute.DeleteSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def expand_ip_cidr_range_unary(self, + request: Optional[Union[compute.ExpandIpCidrRangeSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + subnetworks_expand_ip_cidr_range_request_resource: Optional[compute.SubnetworksExpandIpCidrRangeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Expands the IP CIDR range of the subnetwork to a + specified value. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_expand_ip_cidr_range(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.ExpandIpCidrRangeSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.expand_ip_cidr_range(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ExpandIpCidrRangeSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.ExpandIpCidrRange. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork (str): + Name of the Subnetwork resource to + update. + + This corresponds to the ``subnetwork`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetworks_expand_ip_cidr_range_request_resource (google.cloud.compute_v1.types.SubnetworksExpandIpCidrRangeRequest): + The body resource for this request + This corresponds to the ``subnetworks_expand_ip_cidr_range_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, subnetwork, subnetworks_expand_ip_cidr_range_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ExpandIpCidrRangeSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ExpandIpCidrRangeSubnetworkRequest): + request = compute.ExpandIpCidrRangeSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + if subnetworks_expand_ip_cidr_range_request_resource is not None: + request.subnetworks_expand_ip_cidr_range_request_resource = subnetworks_expand_ip_cidr_range_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.expand_ip_cidr_range] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def expand_ip_cidr_range(self, + request: Optional[Union[compute.ExpandIpCidrRangeSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + subnetworks_expand_ip_cidr_range_request_resource: Optional[compute.SubnetworksExpandIpCidrRangeRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Expands the IP CIDR range of the subnetwork to a + specified value. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_expand_ip_cidr_range(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.ExpandIpCidrRangeSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.expand_ip_cidr_range(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ExpandIpCidrRangeSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.ExpandIpCidrRange. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork (str): + Name of the Subnetwork resource to + update. + + This corresponds to the ``subnetwork`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetworks_expand_ip_cidr_range_request_resource (google.cloud.compute_v1.types.SubnetworksExpandIpCidrRangeRequest): + The body resource for this request + This corresponds to the ``subnetworks_expand_ip_cidr_range_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, subnetwork, subnetworks_expand_ip_cidr_range_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ExpandIpCidrRangeSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ExpandIpCidrRangeSubnetworkRequest): + request = compute.ExpandIpCidrRangeSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + if subnetworks_expand_ip_cidr_range_request_resource is not None: + request.subnetworks_expand_ip_cidr_range_request_resource = subnetworks_expand_ip_cidr_range_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.expand_ip_cidr_range] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Subnetwork: + r"""Returns the specified subnetwork. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.GetSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork (str): + Name of the Subnetwork resource to + return. 
+ + This corresponds to the ``subnetwork`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Subnetwork: + Represents a Subnetwork resource. A + subnetwork (also known as a subnet) is a + logical partition of a Virtual Private + Cloud network with one primary IP range + and zero or more secondary IP ranges. + For more information, read Virtual + Private Cloud (VPC) Network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, subnetwork]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetSubnetworkRequest): + request = compute.GetSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[compute.GetIamPolicySubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicySubnetworkRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicySubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.GetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetIamPolicySubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetIamPolicySubnetworkRequest): + request = compute.GetIamPolicySubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork_resource: Optional[compute.Subnetwork] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a subnetwork in the specified project using + the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.InsertSubnetworkRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): + The body resource for this request + This corresponds to the ``subnetwork_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, subnetwork_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertSubnetworkRequest): + request = compute.InsertSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork_resource is not None: + request.subnetwork_resource = subnetwork_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork_resource: Optional[compute.Subnetwork] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a subnetwork in the specified project using + the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.InsertSubnetworkRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): + The body resource for this request + This corresponds to the ``subnetwork_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, subnetwork_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertSubnetworkRequest): + request = compute.InsertSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork_resource is not None: + request.subnetwork_resource = subnetwork_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListSubnetworksRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of subnetworks available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.ListSubnetworksRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListSubnetworksRequest, dict]): + The request object. A request message for + Subnetworks.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.subnetworks.pagers.ListPager: + Contains a list of Subnetwork + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListSubnetworksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListSubnetworksRequest): + request = compute.ListSubnetworksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_usable(self, + request: Optional[Union[compute.ListUsableSubnetworksRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsablePager: + r"""Retrieves an aggregated list of all usable + subnetworks in the project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_usable(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.ListUsableSubnetworksRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_usable(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListUsableSubnetworksRequest, dict]): + The request object. A request message for + Subnetworks.ListUsable. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.subnetworks.pagers.ListUsablePager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListUsableSubnetworksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListUsableSubnetworksRequest): + request = compute.ListUsableSubnetworksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_usable] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUsablePager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + subnetwork_resource: Optional[compute.Subnetwork] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified subnetwork with the data + included in the request. 
Only certain fields can be + updated with a patch request as indicated in the field + descriptions. You must specify the current fingerprint + of the subnetwork resource being patched. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.PatchSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork (str): + Name of the Subnetwork resource to + patch. + + This corresponds to the ``subnetwork`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): + The body resource for this request + This corresponds to the ``subnetwork_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, subnetwork, subnetwork_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchSubnetworkRequest): + request = compute.PatchSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + if subnetwork_resource is not None: + request.subnetwork_resource = subnetwork_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + subnetwork_resource: Optional[compute.Subnetwork] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified subnetwork with the data + included in the request. Only certain fields can be + updated with a patch request as indicated in the field + descriptions. You must specify the current fingerprint + of the subnetwork resource being patched. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.PatchSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork (str): + Name of the Subnetwork resource to + patch. + + This corresponds to the ``subnetwork`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): + The body resource for this request + This corresponds to the ``subnetwork_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, subnetwork, subnetwork_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchSubnetworkRequest): + request = compute.PatchSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + if subnetwork_resource is not None: + request.subnetwork_resource = subnetwork_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[compute.SetIamPolicySubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[compute.RegionSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicySubnetworkRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicySubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.SetIamPolicy. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: 
BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_policy_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetIamPolicySubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetIamPolicySubnetworkRequest): + request = compute.SetIamPolicySubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = region_set_policy_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_private_ip_google_access_unary(self, + request: Optional[Union[compute.SetPrivateIpGoogleAccessSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + subnetworks_set_private_ip_google_access_request_resource: Optional[compute.SubnetworksSetPrivateIpGoogleAccessRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Set whether VMs in this subnet can access Google + services without assigning external IP addresses through + Private Google Access. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_private_ip_google_access(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.SetPrivateIpGoogleAccessSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.set_private_ip_google_access(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetPrivateIpGoogleAccessSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.SetPrivateIpGoogleAccess. + See the method description for details. + project (str): + Project ID for this request. 
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            region (str):
+                Name of the region scoping this
+                request.
+
+                This corresponds to the ``region`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            subnetwork (str):
+                Name of the Subnetwork resource.
+                This corresponds to the ``subnetwork`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            subnetworks_set_private_ip_google_access_request_resource (google.cloud.compute_v1.types.SubnetworksSetPrivateIpGoogleAccessRequest):
+                The body resource for this request
+                This corresponds to the ``subnetworks_set_private_ip_google_access_request_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, region, subnetwork, subnetworks_set_private_ip_google_access_request_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.SetPrivateIpGoogleAccessSubnetworkRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+ if not isinstance(request, compute.SetPrivateIpGoogleAccessSubnetworkRequest): + request = compute.SetPrivateIpGoogleAccessSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + if subnetworks_set_private_ip_google_access_request_resource is not None: + request.subnetworks_set_private_ip_google_access_request_resource = subnetworks_set_private_ip_google_access_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_private_ip_google_access] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_private_ip_google_access(self, + request: Optional[Union[compute.SetPrivateIpGoogleAccessSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + subnetwork: Optional[str] = None, + subnetworks_set_private_ip_google_access_request_resource: Optional[compute.SubnetworksSetPrivateIpGoogleAccessRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Set whether VMs in this subnet can access Google + services without assigning external IP addresses through + Private Google Access. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_private_ip_google_access(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.SetPrivateIpGoogleAccessSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.set_private_ip_google_access(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetPrivateIpGoogleAccessSubnetworkRequest, dict]): + The request object. A request message for + Subnetworks.SetPrivateIpGoogleAccess. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subnetwork (str): + Name of the Subnetwork resource. + This corresponds to the ``subnetwork`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ subnetworks_set_private_ip_google_access_request_resource (google.cloud.compute_v1.types.SubnetworksSetPrivateIpGoogleAccessRequest): + The body resource for this request + This corresponds to the ``subnetworks_set_private_ip_google_access_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, subnetwork, subnetworks_set_private_ip_google_access_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetPrivateIpGoogleAccessSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetPrivateIpGoogleAccessSubnetworkRequest): + request = compute.SetPrivateIpGoogleAccessSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if subnetwork is not None: + request.subnetwork = subnetwork + if subnetworks_set_private_ip_google_access_request_resource is not None: + request.subnetworks_set_private_ip_google_access_request_resource = subnetworks_set_private_ip_google_access_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_private_ip_google_access] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("subnetwork", request.subnetwork), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsSubnetworkRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsSubnetworkRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsSubnetworkRequest, dict]): + The request object. 
A request message for + Subnetworks.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsSubnetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.TestIamPermissionsSubnetworkRequest): + request = compute.TestIamPermissionsSubnetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SubnetworksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "SubnetworksClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/pagers.py new file mode 100644 index 000000000..5cf7ba367 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/pagers.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SubnetworkAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SubnetworkAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.SubnetworkAggregatedList], + request: compute.AggregatedListSubnetworksRequest, + response: compute.SubnetworkAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListSubnetworksRequest): + The initial request object. + response (google.cloud.compute_v1.types.SubnetworkAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListSubnetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SubnetworkAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.SubnetworksScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.SubnetworksScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SubnetworkList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SubnetworkList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.SubnetworkList], + request: compute.ListSubnetworksRequest, + response: compute.SubnetworkList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListSubnetworksRequest): + The initial request object. + response (google.cloud.compute_v1.types.SubnetworkList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListSubnetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SubnetworkList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Subnetwork]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListUsablePager: + """A pager for iterating through ``list_usable`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.UsableSubnetworksAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsable`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.UsableSubnetworksAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.UsableSubnetworksAggregatedList], + request: compute.ListUsableSubnetworksRequest, + response: compute.UsableSubnetworksAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListUsableSubnetworksRequest): + The initial request object. + response (google.cloud.compute_v1.types.UsableSubnetworksAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListUsableSubnetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.UsableSubnetworksAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.UsableSubnetwork]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/__init__.py new file mode 100644 index 000000000..20326fa49 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SubnetworksTransport +from .rest import SubnetworksRestTransport +from .rest import SubnetworksRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[SubnetworksTransport]] +_transport_registry['rest'] = SubnetworksRestTransport + +__all__ = ( + 'SubnetworksTransport', + 'SubnetworksRestTransport', + 'SubnetworksRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/base.py new file mode 100644 index 000000000..efe9ec237 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/base.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class SubnetworksTransport(abc.ABC): + """Abstract transport class for Subnetworks.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.expand_ip_cidr_range: gapic_v1.method.wrap_method( + self.expand_ip_cidr_range, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_usable: gapic_v1.method.wrap_method( + self.list_usable, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_private_ip_google_access: gapic_v1.method.wrap_method( + self.set_private_ip_google_access, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListSubnetworksRequest], + Union[ + compute.SubnetworkAggregatedList, + Awaitable[compute.SubnetworkAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteSubnetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def expand_ip_cidr_range(self) -> Callable[ + [compute.ExpandIpCidrRangeSubnetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetSubnetworkRequest], + Union[ + compute.Subnetwork, + Awaitable[compute.Subnetwork] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [compute.GetIamPolicySubnetworkRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertSubnetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListSubnetworksRequest], + Union[ + compute.SubnetworkList, + Awaitable[compute.SubnetworkList] + ]]: + raise NotImplementedError() + + @property + def list_usable(self) -> Callable[ + [compute.ListUsableSubnetworksRequest], + Union[ + compute.UsableSubnetworksAggregatedList, + Awaitable[compute.UsableSubnetworksAggregatedList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchSubnetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [compute.SetIamPolicySubnetworkRequest], + Union[ + compute.Policy, + Awaitable[compute.Policy] + ]]: + raise NotImplementedError() + + @property + def 
set_private_ip_google_access(self) -> Callable[ + [compute.SetPrivateIpGoogleAccessSubnetworkRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsSubnetworkRequest], + Union[ + compute.TestPermissionsResponse, + Awaitable[compute.TestPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'SubnetworksTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/rest.py new file mode 100644 index 000000000..0b426c6e6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/subnetworks/transports/rest.py @@ -0,0 +1,1726 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import SubnetworksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SubnetworksRestInterceptor: + """Interceptor for Subnetworks. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SubnetworksRestTransport. + + .. 
code-block:: python + class MyCustomSubnetworksInterceptor(SubnetworksRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_expand_ip_cidr_range(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_expand_ip_cidr_range(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_usable(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_private_ip_google_access(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_private_ip_google_access(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SubnetworksRestTransport(interceptor=MyCustomSubnetworksInterceptor()) + client = SubnetworksClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListSubnetworksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListSubnetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.SubnetworkAggregatedList) -> compute.SubnetworkAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. 
+ """ + return response + def pre_delete(self, request: compute.DeleteSubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteSubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. + """ + return response + def pre_expand_ip_cidr_range(self, request: compute.ExpandIpCidrRangeSubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ExpandIpCidrRangeSubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for expand_ip_cidr_range + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_expand_ip_cidr_range(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for expand_ip_cidr_range + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetSubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetSubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_get(self, response: compute.Subnetwork) -> compute.Subnetwork: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. 
+ """ + return response + def pre_get_iam_policy(self, request: compute.GetIamPolicySubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetIamPolicySubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertSubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertSubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListSubnetworksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListSubnetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_list(self, response: compute.SubnetworkList) -> compute.SubnetworkList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. 
+ """ + return response + def pre_list_usable(self, request: compute.ListUsableSubnetworksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListUsableSubnetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_usable + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_list_usable(self, response: compute.UsableSubnetworksAggregatedList) -> compute.UsableSubnetworksAggregatedList: + """Post-rpc interceptor for list_usable + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchSubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchSubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy(self, request: compute.SetIamPolicySubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetIamPolicySubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. 
+ """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. + """ + return response + def pre_set_private_ip_google_access(self, request: compute.SetPrivateIpGoogleAccessSubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetPrivateIpGoogleAccessSubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_private_ip_google_access + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_set_private_ip_google_access(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_private_ip_google_access + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions(self, request: compute.TestIamPermissionsSubnetworkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.TestIamPermissionsSubnetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subnetworks server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: compute.TestPermissionsResponse) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Subnetworks server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class SubnetworksRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SubnetworksRestInterceptor + + +class SubnetworksRestTransport(SubnetworksTransport): + """REST backend transport for Subnetworks. + + The Subnetworks API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[SubnetworksRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SubnetworksRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(SubnetworksRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListSubnetworksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SubnetworkAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListSubnetworksRequest): + The request object. A request message for + Subnetworks.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.SubnetworkAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/subnetworks', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListSubnetworksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = compute.SubnetworkAggregatedList()
+            pb_resp = compute.SubnetworkAggregatedList.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_aggregated_list(resp)
+            return resp
+
+    class _Delete(SubnetworksRestStub):
+        def __hash__(self):
+            return hash("Delete")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.DeleteSubnetworkRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the delete method over HTTP.
+
+            Args:
+                request (~.compute.DeleteSubnetworkRequest):
+                    The request object. A request message for
+                    Subnetworks.Delete. See the method
+                    description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. For more
+                    information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteSubnetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = compute.Operation()
+            pb_resp = compute.Operation.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_delete(resp)
+            return resp
+
+    class _ExpandIpCidrRange(SubnetworksRestStub):
+        def __hash__(self):
+            return hash("ExpandIpCidrRange")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.ExpandIpCidrRangeSubnetworkRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the expand ip cidr range method over HTTP.
+
+            Args:
+                request (~.compute.ExpandIpCidrRangeSubnetworkRequest):
+                    The request object. A request message for
+                    Subnetworks.ExpandIpCidrRange. See the
+                    method description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. For more
+                    information, read Global, Regional, and Zonal Resources.
+
+            """
+
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange',
+                'body': 'subnetworks_expand_ip_cidr_range_request_resource',
+            },
+            ]
+            request, metadata = self._interceptor.pre_expand_ip_cidr_range(request, metadata)
+            pb_request = compute.ExpandIpCidrRangeSubnetworkRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                including_default_value_fields=False,
+                use_integers_for_enums=False
+            )
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+
+            # Jsonify the query params
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                including_default_value_fields=False,
+                use_integers_for_enums=False,
+            ))
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            # Send the request
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_expand_ip_cidr_range(resp) + return resp + + class _Get(SubnetworksRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetSubnetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Subnetwork: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetSubnetworkRequest): + The request object. A request message for + Subnetworks.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Subnetwork: + Represents a Subnetwork resource. A + subnetwork (also known as a subnet) is a + logical partition of a Virtual Private + Cloud network with one primary IP range + and zero or more secondary IP ranges. + For more information, read Virtual + Private Cloud (VPC) Network. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetSubnetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Subnetwork() + pb_resp = compute.Subnetwork.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(SubnetworksRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetIamPolicySubnetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicySubnetworkRequest): + The request object. A request message for + Subnetworks.GetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation <https://cloud.google.com/iam/docs/>`__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/getIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicySubnetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(SubnetworksRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertSubnetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertSubnetworkRequest): + The request object. A request message for + Subnetworks.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks', + 'body': 'subnetwork_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertSubnetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(SubnetworksRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListSubnetworksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.SubnetworkList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListSubnetworksRequest): + The request object. A request message for + Subnetworks.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.SubnetworkList: + Contains a list of Subnetwork + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListSubnetworksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SubnetworkList() + pb_resp = compute.SubnetworkList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListUsable(SubnetworksRestStub): + def __hash__(self): + return hash("ListUsable") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListUsableSubnetworksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.UsableSubnetworksAggregatedList: + r"""Call the list usable method over HTTP. + + Args: + request (~.compute.ListUsableSubnetworksRequest): + The request object. A request message for + Subnetworks.ListUsable. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.UsableSubnetworksAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/subnetworks/listUsable', + }, + ] + request, metadata = self._interceptor.pre_list_usable(request, metadata) + pb_request = compute.ListUsableSubnetworksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.UsableSubnetworksAggregatedList() + pb_resp = compute.UsableSubnetworksAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_usable(resp) + return resp + + class _Patch(SubnetworksRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchSubnetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchSubnetworkRequest): + The request object. A request message for + Subnetworks.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}', + 'body': 'subnetwork_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchSubnetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetIamPolicy(SubnetworksRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetIamPolicySubnetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicySubnetworkRequest): + The request object. A request message for + Subnetworks.SetIamPolicy. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] }, { "role": + "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": + "expirable access", "description": "Does not grant + access after Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** + bindings: - members: - user:mike@example.com - + group:admins@example.com - domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - members: + - user:eve@example.com role: + roles/resourcemanager.organizationViewer condition: + title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= + version: 3 For a description of IAM and its features, + see the `IAM + documentation <https://cloud.google.com/iam/docs/>`__. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/setIamPolicy', + 'body': 'region_set_policy_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicySubnetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _SetPrivateIpGoogleAccess(SubnetworksRestStub): + def __hash__(self): + return hash("SetPrivateIpGoogleAccess") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetPrivateIpGoogleAccessSubnetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set private ip google + access method over HTTP. + + Args: + request (~.compute.SetPrivateIpGoogleAccessSubnetworkRequest): + The request object. A request message for + Subnetworks.SetPrivateIpGoogleAccess. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess', + 'body': 'subnetworks_set_private_ip_google_access_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_private_ip_google_access(request, metadata) + pb_request = compute.SetPrivateIpGoogleAccessSubnetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_private_ip_google_access(resp) + return resp + + class _TestIamPermissions(SubnetworksRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsSubnetworkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsSubnetworkRequest): + The request object. A request message for + Subnetworks.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsSubnetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            # Raise HTTP errors (status >= 400) as google.api_core exceptions.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.TestPermissionsResponse()
            pb_resp = compute.TestPermissionsResponse.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_test_iam_permissions(resp)
            return resp

    # Each property below exposes one of the transport's request-handler
    # classes as a plain callable keyed by RPC name.  The `# type: ignore`
    # silences mypy, which cannot see that the handler's __call__ matches
    # the annotated Callable signature.
    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListSubnetworksRequest],
            compute.SubnetworkAggregatedList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore

    @property
    def delete(self) -> Callable[
            [compute.DeleteSubnetworkRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Delete(self._session, self._host, self._interceptor) # type: ignore

    @property
    def expand_ip_cidr_range(self) -> Callable[
            [compute.ExpandIpCidrRangeSubnetworkRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._ExpandIpCidrRange(self._session, self._host, self._interceptor) # type: ignore

    @property
    def get(self) -> Callable[
            [compute.GetSubnetworkRequest],
            compute.Subnetwork]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor) # type: ignore

    @property
    def get_iam_policy(self) -> Callable[
            [compute.GetIamPolicySubnetworkRequest],
            compute.Policy]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore

    @property
    def insert(self) -> Callable[
            [compute.InsertSubnetworkRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Insert(self._session, self._host, self._interceptor) # type: ignore

    @property
    def list(self) -> Callable[
            [compute.ListSubnetworksRequest],
            compute.SubnetworkList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor) # type: ignore

    @property
    def list_usable(self) -> Callable[
            [compute.ListUsableSubnetworksRequest],
            compute.UsableSubnetworksAggregatedList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._ListUsable(self._session, self._host, self._interceptor) # type: ignore

    @property
    def patch(self) -> Callable[
            [compute.PatchSubnetworkRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Patch(self._session, self._host, self._interceptor) # type: ignore

    @property
    def set_iam_policy(self) -> Callable[
            [compute.SetIamPolicySubnetworkRequest],
            compute.Policy]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore

    @property
    def set_private_ip_google_access(self) -> Callable[
            [compute.SetPrivateIpGoogleAccessSubnetworkRequest],
            compute.Operation]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._SetPrivateIpGoogleAccess(self._session, self._host, self._interceptor) # type: ignore

    @property
    def test_iam_permissions(self) -> Callable[
            [compute.TestIamPermissionsSubnetworkRequest],
            compute.TestPermissionsResponse]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore

    @property
    def kind(self) -> str:
        # Transport discriminator used by the client factory.
        return "rest"

    def close(self):
        # Release the underlying requests.Session.
        self._session.close()


__all__=(
    'SubnetworksRestTransport',
)
diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/__init__.py
new file mode 100644
index 000000000..ac1f7a7e5
--- /dev/null
+++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/__init__.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TargetGrpcProxiesClient + +__all__ = ( + 'TargetGrpcProxiesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/client.py new file mode 100644 index 000000000..e600b58fd --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/client.py @@ -0,0 +1,1387 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.target_grpc_proxies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import TargetGrpcProxiesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import TargetGrpcProxiesRestTransport + + +class TargetGrpcProxiesClientMeta(type): + """Metaclass for the TargetGrpcProxies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TargetGrpcProxiesTransport]] + _transport_registry["rest"] = TargetGrpcProxiesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TargetGrpcProxiesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TargetGrpcProxiesClient(metaclass=TargetGrpcProxiesClientMeta): + """The TargetGrpcProxies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetGrpcProxiesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetGrpcProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TargetGrpcProxiesTransport: + """Returns the transport used by the client instance. + + Returns: + TargetGrpcProxiesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TargetGrpcProxiesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the target grpc proxies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TargetGrpcProxiesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TargetGrpcProxiesTransport): + # transport is a TargetGrpcProxiesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteTargetGrpcProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_grpc_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified TargetGrpcProxy in the given + scope + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.TargetGrpcProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteTargetGrpcProxyRequest(
                    project="project_value",
                    target_grpc_proxy="target_grpc_proxy_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteTargetGrpcProxyRequest, dict]):
                The request object. A request message for
                TargetGrpcProxies.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_grpc_proxy (str):
                Name of the TargetGrpcProxy resource
                to delete.

                This corresponds to the ``target_grpc_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, target_grpc_proxy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteTargetGrpcProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteTargetGrpcProxyRequest):
            request = compute.DeleteTargetGrpcProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if target_grpc_proxy is not None:
                request.target_grpc_proxy = target_grpc_proxy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("target_grpc_proxy", request.target_grpc_proxy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete(self,
            request: Optional[Union[compute.DeleteTargetGrpcProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            target_grpc_proxy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified TargetGrpcProxy in the given
        scope

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.TargetGrpcProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteTargetGrpcProxyRequest(
                    project="project_value",
                    target_grpc_proxy="target_grpc_proxy_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteTargetGrpcProxyRequest, dict]):
                The request object. A request message for
                TargetGrpcProxies.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_grpc_proxy (str):
                Name of the TargetGrpcProxy resource
                to delete.

                This corresponds to the ``target_grpc_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, target_grpc_proxy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteTargetGrpcProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteTargetGrpcProxyRequest):
            request = compute.DeleteTargetGrpcProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if target_grpc_proxy is not None:
                request.target_grpc_proxy = target_grpc_proxy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("target_grpc_proxy", request.target_grpc_proxy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the raw Operation in an ExtendedOperation that polls the
        # global operations service until the operation completes.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            # Maps the compute Operation's http_error_* fields onto the
            # uniform names ExtendedOperation expects.
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def get(self,
            request: Optional[Union[compute.GetTargetGrpcProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            target_grpc_proxy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.TargetGrpcProxy:
        r"""Returns the specified TargetGrpcProxy resource in the
        given scope.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.TargetGrpcProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.GetTargetGrpcProxyRequest(
                    project="project_value",
                    target_grpc_proxy="target_grpc_proxy_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetTargetGrpcProxyRequest, dict]):
                The request object. A request message for
                TargetGrpcProxies.Get. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_grpc_proxy (str):
                Name of the TargetGrpcProxy resource
                to return.

                This corresponds to the ``target_grpc_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.TargetGrpcProxy:
                Represents a Target gRPC Proxy resource. A target gRPC
                proxy is a component of load balancers intended for load
                balancing gRPC traffic. Only global forwarding rules
                with load balancing scheme INTERNAL_SELF_MANAGED can
                reference a target gRPC proxy. The target gRPC Proxy
                references a URL map that specifies how traffic is
                routed to gRPC backend services.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, target_grpc_proxy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetTargetGrpcProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetTargetGrpcProxyRequest):
            request = compute.GetTargetGrpcProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if target_grpc_proxy is not None:
                request.target_grpc_proxy = target_grpc_proxy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("target_grpc_proxy", request.target_grpc_proxy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert_unary(self,
            request: Optional[Union[compute.InsertTargetGrpcProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            target_grpc_proxy_resource: Optional[compute.TargetGrpcProxy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates a TargetGrpcProxy in the specified project in
        the given scope using the parameters that are included
        in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.TargetGrpcProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertTargetGrpcProxyRequest(
                    project="project_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertTargetGrpcProxyRequest, dict]):
                The request object. A request message for
                TargetGrpcProxies.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy):
                The body resource for this request
                This corresponds to the ``target_grpc_proxy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, target_grpc_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetGrpcProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetGrpcProxyRequest): + request = compute.InsertTargetGrpcProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_grpc_proxy_resource is not None: + request.target_grpc_proxy_resource = target_grpc_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertTargetGrpcProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_grpc_proxy_resource: Optional[compute.TargetGrpcProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a TargetGrpcProxy in the specified project in + the given scope using the parameters that are included + in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetGrpcProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetGrpcProxyRequest, dict]): + The request object. A request message for + TargetGrpcProxies.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy): + The body resource for this request + This corresponds to the ``target_grpc_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_grpc_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetGrpcProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetGrpcProxyRequest): + request = compute.InsertTargetGrpcProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_grpc_proxy_resource is not None: + request.target_grpc_proxy_resource = target_grpc_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListTargetGrpcProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Lists the TargetGrpcProxies for a project in the + given scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetGrpcProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListTargetGrpcProxiesRequest, dict]): + The request object. A request message for + TargetGrpcProxies.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_grpc_proxies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListTargetGrpcProxiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListTargetGrpcProxiesRequest): + request = compute.ListTargetGrpcProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchTargetGrpcProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_grpc_proxy: Optional[str] = None, + target_grpc_proxy_resource: Optional[compute.TargetGrpcProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified TargetGrpcProxy resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetGrpcProxyRequest( + project="project_value", + target_grpc_proxy="target_grpc_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchTargetGrpcProxyRequest, dict]): + The request object. A request message for + TargetGrpcProxies.Patch. See the method + description for details. + project (str): + Project ID for this request. 
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            target_grpc_proxy (str):
+                Name of the TargetGrpcProxy resource
+                to patch.
+
+                This corresponds to the ``target_grpc_proxy`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy):
+                The body resource for this request
+                This corresponds to the ``target_grpc_proxy_resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.extended_operation.ExtendedOperation:
+                An object representing an extended
+                long-running operation.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, target_grpc_proxy, target_grpc_proxy_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.PatchTargetGrpcProxyRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.PatchTargetGrpcProxyRequest):
+            request = compute.PatchTargetGrpcProxyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if project is not None: + request.project = project + if target_grpc_proxy is not None: + request.target_grpc_proxy = target_grpc_proxy + if target_grpc_proxy_resource is not None: + request.target_grpc_proxy_resource = target_grpc_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_grpc_proxy", request.target_grpc_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchTargetGrpcProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_grpc_proxy: Optional[str] = None, + target_grpc_proxy_resource: Optional[compute.TargetGrpcProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified TargetGrpcProxy resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetGrpcProxyRequest( + project="project_value", + target_grpc_proxy="target_grpc_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchTargetGrpcProxyRequest, dict]): + The request object. A request message for + TargetGrpcProxies.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_grpc_proxy (str): + Name of the TargetGrpcProxy resource + to patch. + + This corresponds to the ``target_grpc_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy): + The body resource for this request + This corresponds to the ``target_grpc_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_grpc_proxy, target_grpc_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchTargetGrpcProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchTargetGrpcProxyRequest): + request = compute.PatchTargetGrpcProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_grpc_proxy is not None: + request.target_grpc_proxy = target_grpc_proxy + if target_grpc_proxy_resource is not None: + request.target_grpc_proxy_resource = target_grpc_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_grpc_proxy", request.target_grpc_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "TargetGrpcProxiesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TargetGrpcProxiesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/pagers.py new file mode 100644 index 000000000..6dbedf2c8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetGrpcProxyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetGrpcProxyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetGrpcProxyList], + request: compute.ListTargetGrpcProxiesRequest, + response: compute.TargetGrpcProxyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListTargetGrpcProxiesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetGrpcProxyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListTargetGrpcProxiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetGrpcProxyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetGrpcProxy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/__init__.py new file mode 100644 index 000000000..fbb6028a6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TargetGrpcProxiesTransport +from .rest import TargetGrpcProxiesRestTransport +from .rest import TargetGrpcProxiesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[TargetGrpcProxiesTransport]] +_transport_registry['rest'] = TargetGrpcProxiesRestTransport + +__all__ = ( + 'TargetGrpcProxiesTransport', + 'TargetGrpcProxiesRestTransport', + 'TargetGrpcProxiesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/base.py new file mode 100644 index 000000000..cbd6ac524 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/base.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.compute_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.compute_v1.types import compute
+from google.cloud.compute_v1.services import global_operations
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class TargetGrpcProxiesTransport(abc.ABC):
+    """Abstract transport class for TargetGrpcProxies."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/compute',
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'compute.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetGrpcProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetTargetGrpcProxyRequest], + Union[ + compute.TargetGrpcProxy, + Awaitable[compute.TargetGrpcProxy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertTargetGrpcProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListTargetGrpcProxiesRequest], + Union[ + compute.TargetGrpcProxyList, + Awaitable[compute.TargetGrpcProxyList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchTargetGrpcProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'TargetGrpcProxiesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py new file mode 100644 index 000000000..9a025eb2e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py @@ -0,0 +1,805 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import TargetGrpcProxiesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TargetGrpcProxiesRestInterceptor: + """Interceptor for TargetGrpcProxies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TargetGrpcProxiesRestTransport. + + .. 
code-block:: python + class MyCustomTargetGrpcProxiesInterceptor(TargetGrpcProxiesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TargetGrpcProxiesRestTransport(interceptor=MyCustomTargetGrpcProxiesInterceptor()) + client = TargetGrpcProxiesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteTargetGrpcProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteTargetGrpcProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetGrpcProxies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the TargetGrpcProxies server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetTargetGrpcProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetTargetGrpcProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetGrpcProxies server. + """ + return request, metadata + + def post_get(self, response: compute.TargetGrpcProxy) -> compute.TargetGrpcProxy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the TargetGrpcProxies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertTargetGrpcProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertTargetGrpcProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetGrpcProxies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the TargetGrpcProxies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListTargetGrpcProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListTargetGrpcProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetGrpcProxies server. + """ + return request, metadata + + def post_list(self, response: compute.TargetGrpcProxyList) -> compute.TargetGrpcProxyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the TargetGrpcProxies server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchTargetGrpcProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchTargetGrpcProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetGrpcProxies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the TargetGrpcProxies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TargetGrpcProxiesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TargetGrpcProxiesRestInterceptor + + +class TargetGrpcProxiesRestTransport(TargetGrpcProxiesTransport): + """REST backend transport for TargetGrpcProxies. + + The TargetGrpcProxies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TargetGrpcProxiesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TargetGrpcProxiesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(TargetGrpcProxiesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteTargetGrpcProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteTargetGrpcProxyRequest): + The request object. A request message for + TargetGrpcProxies.Delete. 
See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteTargetGrpcProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(TargetGrpcProxiesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetTargetGrpcProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetGrpcProxy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetTargetGrpcProxyRequest): + The request object. A request message for + TargetGrpcProxies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetGrpcProxy: + Represents a Target gRPC Proxy resource. A target gRPC + proxy is a component of load balancers intended for load + balancing gRPC traffic. Only global forwarding rules + with load balancing scheme INTERNAL_SELF_MANAGED can + reference a target gRPC proxy. The target gRPC Proxy + references a URL map that specifies how traffic is + routed to gRPC backend services. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetTargetGrpcProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetGrpcProxy() + pb_resp = compute.TargetGrpcProxy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(TargetGrpcProxiesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertTargetGrpcProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertTargetGrpcProxyRequest): + The request object. A request message for + TargetGrpcProxies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetGrpcProxies', + 'body': 'target_grpc_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertTargetGrpcProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(TargetGrpcProxiesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListTargetGrpcProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetGrpcProxyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListTargetGrpcProxiesRequest): + The request object. A request message for + TargetGrpcProxies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TargetGrpcProxyList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetGrpcProxies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListTargetGrpcProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetGrpcProxyList() + pb_resp = compute.TargetGrpcProxyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(TargetGrpcProxiesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchTargetGrpcProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchTargetGrpcProxyRequest): + The request object. A request message for + TargetGrpcProxies.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}', + 'body': 'target_grpc_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchTargetGrpcProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetGrpcProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetTargetGrpcProxyRequest], + compute.TargetGrpcProxy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertTargetGrpcProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListTargetGrpcProxiesRequest], + compute.TargetGrpcProxyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchTargetGrpcProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TargetGrpcProxiesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/__init__.py new file mode 100644 index 000000000..9bc1cbc01 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TargetHttpProxiesClient + +__all__ = ( + 'TargetHttpProxiesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/client.py new file mode 100644 index 000000000..0540b69bd --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/client.py @@ -0,0 +1,1768 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.target_http_proxies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import TargetHttpProxiesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import TargetHttpProxiesRestTransport + + +class TargetHttpProxiesClientMeta(type): + """Metaclass for the TargetHttpProxies client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TargetHttpProxiesTransport]] + _transport_registry["rest"] = TargetHttpProxiesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TargetHttpProxiesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TargetHttpProxiesClient(metaclass=TargetHttpProxiesClientMeta): + """The TargetHttpProxies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetHttpProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetHttpProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TargetHttpProxiesTransport: + """Returns the transport used by the client instance. + + Returns: + TargetHttpProxiesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TargetHttpProxiesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the target http proxies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TargetHttpProxiesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TargetHttpProxiesTransport): + # transport is a TargetHttpProxiesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
                )
            self._transport = transport
        else:
            # Local import to keep this private fallback out of module scope.
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def aggregated_list(self,
            request: Optional[Union[compute.AggregatedListTargetHttpProxiesRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.AggregatedListPager:
        r"""Retrieves the list of all TargetHttpProxy resources,
        regional and global, available to the specified project.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_aggregated_list():
                # Create a client
                client = compute_v1.TargetHttpProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.AggregatedListTargetHttpProxiesRequest(
                    project="project_value",
                )

                # Make the request
                page_result = client.aggregated_list(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AggregatedListTargetHttpProxiesRequest, dict]):
                The request object. A request message for
                TargetHttpProxies.AggregatedList. See
                the method description for details.
            project (str):
                Name of the project scoping this
                request.

                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.services.target_http_proxies.pagers.AggregatedListPager:
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AggregatedListTargetHttpProxiesRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AggregatedListTargetHttpProxiesRequest):
            request = compute.AggregatedListTargetHttpProxiesRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.aggregated_list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.AggregatedListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete_unary(self,
            request: Optional[Union[compute.DeleteTargetHttpProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            target_http_proxy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Deletes the specified TargetHttpProxy resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.TargetHttpProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteTargetHttpProxyRequest(
                    project="project_value",
                    target_http_proxy="target_http_proxy_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteTargetHttpProxyRequest, dict]):
                The request object. A request message for
                TargetHttpProxies.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_http_proxy (str):
                Name of the TargetHttpProxy resource
                to delete.

                This corresponds to the ``target_http_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, target_http_proxy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteTargetHttpProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteTargetHttpProxyRequest):
            request = compute.DeleteTargetHttpProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if target_http_proxy is not None:
                request.target_http_proxy = target_http_proxy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("target_http_proxy", request.target_http_proxy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete(self,
            request: Optional[Union[compute.DeleteTargetHttpProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            target_http_proxy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified TargetHttpProxy resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.TargetHttpProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteTargetHttpProxyRequest(
                    project="project_value",
                    target_http_proxy="target_http_proxy_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteTargetHttpProxyRequest, dict]):
                The request object. A request message for
                TargetHttpProxies.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_http_proxy (str):
                Name of the TargetHttpProxy resource
                to delete.

                This corresponds to the ``target_http_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, target_http_proxy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteTargetHttpProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteTargetHttpProxyRequest):
            request = compute.DeleteTargetHttpProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if target_http_proxy is not None:
                request.target_http_proxy = target_http_proxy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("target_http_proxy", request.target_http_proxy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Unlike `delete_unary`, poll the returned Operation via the global
        # operations client and surface it as an ExtendedOperation.
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def get(self,
            request: Optional[Union[compute.GetTargetHttpProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            target_http_proxy: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.TargetHttpProxy:
        r"""Returns the specified TargetHttpProxy resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.TargetHttpProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.GetTargetHttpProxyRequest(
                    project="project_value",
                    target_http_proxy="target_http_proxy_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetTargetHttpProxyRequest, dict]):
                The request object. A request message for
                TargetHttpProxies.Get. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_http_proxy (str):
                Name of the TargetHttpProxy resource
                to return.

                This corresponds to the ``target_http_proxy`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.TargetHttpProxy:
                Represents a Target HTTP Proxy resource. Google Compute
                Engine has two Target HTTP Proxy resources: \*
                [Global](/compute/docs/reference/rest/v1/targetHttpProxies)
                \*
                [Regional](/compute/docs/reference/rest/v1/regionTargetHttpProxies)
                A target HTTP proxy is a component of GCP HTTP load
                balancers. \* targetHttpProxies are used by external
                HTTP load balancers and Traffic Director. \*
                regionTargetHttpProxies are used by internal HTTP load
                balancers. Forwarding rules reference a target HTTP
                proxy, and the target proxy then references a URL map.
                For more information, read Using Target Proxies and
                Forwarding rule concepts.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, target_http_proxy])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.GetTargetHttpProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.GetTargetHttpProxyRequest):
            request = compute.GetTargetHttpProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if target_http_proxy is not None:
                request.target_http_proxy = target_http_proxy

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("target_http_proxy", request.target_http_proxy),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert_unary(self,
            request: Optional[Union[compute.InsertTargetHttpProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            target_http_proxy_resource: Optional[compute.TargetHttpProxy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates a TargetHttpProxy resource in the specified
        project using the data included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.TargetHttpProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertTargetHttpProxyRequest(
                    project="project_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertTargetHttpProxyRequest, dict]):
                The request object. A request message for
                TargetHttpProxies.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy):
                The body resource for this request
                This corresponds to the ``target_http_proxy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, target_http_proxy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertTargetHttpProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertTargetHttpProxyRequest):
            request = compute.InsertTargetHttpProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if target_http_proxy_resource is not None:
                request.target_http_proxy_resource = target_http_proxy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def insert(self,
            request: Optional[Union[compute.InsertTargetHttpProxyRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            target_http_proxy_resource: Optional[compute.TargetHttpProxy] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Creates a TargetHttpProxy resource in the specified
        project using the data included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.TargetHttpProxiesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertTargetHttpProxyRequest(
                    project="project_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertTargetHttpProxyRequest, dict]):
                The request object. A request message for
                TargetHttpProxies.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy):
                The body resource for this request
                This corresponds to the ``target_http_proxy_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, target_http_proxy_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertTargetHttpProxyRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertTargetHttpProxyRequest):
            request = compute.InsertTargetHttpProxyRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if target_http_proxy_resource is not None:
                request.target_http_proxy_resource = target_http_proxy_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Poll the returned Operation via the global operations client and
        # surface it as an ExtendedOperation (same pattern as `delete`).
        operation_service = self._transport._global_operations_client
        operation_request = compute.GetGlobalOperationRequest()
        operation_request.project = request.project
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListTargetHttpProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of TargetHttpProxy resources + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetHttpProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListTargetHttpProxiesRequest, dict]): + The request object. A request message for + TargetHttpProxies.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_http_proxies.pagers.ListPager: + A list of TargetHttpProxy resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListTargetHttpProxiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListTargetHttpProxiesRequest): + request = compute.ListTargetHttpProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_http_proxy: Optional[str] = None, + target_http_proxy_resource: Optional[compute.TargetHttpProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified TargetHttpProxy resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetHttpProxyRequest( + project="project_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchTargetHttpProxyRequest, dict]): + The request object. A request message for + TargetHttpProxies.Patch. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy resource + to patch. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): + The body resource for this request + This corresponds to the ``target_http_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_http_proxy, target_http_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchTargetHttpProxyRequest): + request = compute.PatchTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + if target_http_proxy_resource is not None: + request.target_http_proxy_resource = target_http_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_http_proxy: Optional[str] = None, + target_http_proxy_resource: Optional[compute.TargetHttpProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified TargetHttpProxy resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetHttpProxyRequest( + project="project_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchTargetHttpProxyRequest, dict]): + The request object. A request message for + TargetHttpProxies.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy resource + to patch. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): + The body resource for this request + This corresponds to the ``target_http_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_http_proxy, target_http_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchTargetHttpProxyRequest): + request = compute.PatchTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + if target_http_proxy_resource is not None: + request.target_http_proxy_resource = target_http_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_url_map_unary(self, + request: Optional[Union[compute.SetUrlMapTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_http_proxy: Optional[str] = None, + url_map_reference_resource: Optional[compute.UrlMapReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the URL map for TargetHttpProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_url_map(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapTargetHttpProxyRequest( + project="project_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetUrlMapTargetHttpProxyRequest, dict]): + The request object. A request message for + TargetHttpProxies.SetUrlMap. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy to set a + URL map for. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + This corresponds to the ``url_map_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_http_proxy, url_map_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetUrlMapTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetUrlMapTargetHttpProxyRequest): + request = compute.SetUrlMapTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + if url_map_reference_resource is not None: + request.url_map_reference_resource = url_map_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_url_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_url_map(self, + request: Optional[Union[compute.SetUrlMapTargetHttpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_http_proxy: Optional[str] = None, + url_map_reference_resource: Optional[compute.UrlMapReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the URL map for TargetHttpProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_url_map(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapTargetHttpProxyRequest( + project="project_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetUrlMapTargetHttpProxyRequest, dict]): + The request object. A request message for + TargetHttpProxies.SetUrlMap. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_http_proxy (str): + Name of the TargetHttpProxy to set a + URL map for. + + This corresponds to the ``target_http_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + This corresponds to the ``url_map_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_http_proxy, url_map_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetUrlMapTargetHttpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetUrlMapTargetHttpProxyRequest): + request = compute.SetUrlMapTargetHttpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_http_proxy is not None: + request.target_http_proxy = target_http_proxy + if url_map_reference_resource is not None: + request.url_map_reference_resource = url_map_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.set_url_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_http_proxy", request.target_http_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "TargetHttpProxiesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TargetHttpProxiesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/pagers.py new file mode 100644 index 000000000..8280e1097 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetHttpProxyAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetHttpProxyAggregatedList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetHttpProxyAggregatedList], + request: compute.AggregatedListTargetHttpProxiesRequest, + response: compute.TargetHttpProxyAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListTargetHttpProxiesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetHttpProxyAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListTargetHttpProxiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetHttpProxyAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.TargetHttpProxiesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.TargetHttpProxiesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetHttpProxyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetHttpProxyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetHttpProxyList], + request: compute.ListTargetHttpProxiesRequest, + response: compute.TargetHttpProxyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListTargetHttpProxiesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetHttpProxyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListTargetHttpProxiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetHttpProxyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetHttpProxy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/__init__.py new file mode 100644 index 000000000..ce59db8f5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TargetHttpProxiesTransport +from .rest import TargetHttpProxiesRestTransport +from .rest import TargetHttpProxiesRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[TargetHttpProxiesTransport]] +_transport_registry['rest'] = TargetHttpProxiesRestTransport + +__all__ = ( + 'TargetHttpProxiesTransport', + 'TargetHttpProxiesRestTransport', + 'TargetHttpProxiesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/base.py new file mode 100644 index 000000000..18d4d0331 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class TargetHttpProxiesTransport(abc.ABC): + """Abstract transport class for TargetHttpProxies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_url_map: gapic_v1.method.wrap_method( + self.set_url_map, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetHttpProxiesRequest], + Union[ + compute.TargetHttpProxyAggregatedList, + Awaitable[compute.TargetHttpProxyAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetHttpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetTargetHttpProxyRequest], + Union[ + compute.TargetHttpProxy, + Awaitable[compute.TargetHttpProxy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertTargetHttpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListTargetHttpProxiesRequest], + Union[ + compute.TargetHttpProxyList, + Awaitable[compute.TargetHttpProxyList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchTargetHttpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_url_map(self) -> Callable[ + [compute.SetUrlMapTargetHttpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'TargetHttpProxiesTransport', +) diff --git 
a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py new file mode 100644 index 000000000..4a3bb9f26 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py @@ -0,0 +1,1055 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base 
import TargetHttpProxiesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TargetHttpProxiesRestInterceptor: + """Interceptor for TargetHttpProxies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TargetHttpProxiesRestTransport. + + .. code-block:: python + class MyCustomTargetHttpProxiesInterceptor(TargetHttpProxiesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, 
metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_url_map(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_url_map(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TargetHttpProxiesRestTransport(interceptor=MyCustomTargetHttpProxiesInterceptor()) + client = TargetHttpProxiesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListTargetHttpProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListTargetHttpProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpProxies server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.TargetHttpProxyAggregatedList) -> compute.TargetHttpProxyAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpProxies server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteTargetHttpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpProxies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpProxies server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetTargetHttpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpProxies server. + """ + return request, metadata + + def post_get(self, response: compute.TargetHttpProxy) -> compute.TargetHttpProxy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpProxies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertTargetHttpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpProxies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpProxies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListTargetHttpProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListTargetHttpProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpProxies server. + """ + return request, metadata + + def post_list(self, response: compute.TargetHttpProxyList) -> compute.TargetHttpProxyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpProxies server but before + it is returned to user code. 
+ """ + return response + def pre_patch(self, request: compute.PatchTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchTargetHttpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpProxies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpProxies server but before + it is returned to user code. + """ + return response + def pre_set_url_map(self, request: compute.SetUrlMapTargetHttpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetUrlMapTargetHttpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_url_map + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpProxies server. + """ + return request, metadata + + def post_set_url_map(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_url_map + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpProxies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TargetHttpProxiesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TargetHttpProxiesRestInterceptor + + +class TargetHttpProxiesRestTransport(TargetHttpProxiesTransport): + """REST backend transport for TargetHttpProxies. + + The TargetHttpProxies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TargetHttpProxiesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TargetHttpProxiesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(TargetHttpProxiesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListTargetHttpProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpProxyAggregatedList: + r"""Call the aggregated list method over HTTP.
+ + Args: + request (~.compute.AggregatedListTargetHttpProxiesRequest): + The request object. A request message for + TargetHttpProxies.AggregatedList. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpProxyAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/targetHttpProxies', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListTargetHttpProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpProxyAggregatedList() + pb_resp = compute.TargetHttpProxyAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(TargetHttpProxiesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteTargetHttpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteTargetHttpProxyRequest): + The request object. A request message for + TargetHttpProxies.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(TargetHttpProxiesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetTargetHttpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpProxy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetTargetHttpProxyRequest): + The request object. A request message for + TargetHttpProxies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpProxy: + Represents a Target HTTP Proxy resource. Google Compute + Engine has two Target HTTP Proxy resources: \* + `Global `__ + \* + `Regional `__ + A target HTTP proxy is a component of GCP HTTP load + balancers. \* targetHttpProxies are used by external + HTTP load balancers and Traffic Director. \* + regionTargetHttpProxies are used by internal HTTP load + balancers. Forwarding rules reference a target HTTP + proxy, and the target proxy then references a URL map. + For more information, read Using Target Proxies and + Forwarding rule concepts. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpProxy() + pb_resp = compute.TargetHttpProxy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(TargetHttpProxiesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertTargetHttpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertTargetHttpProxyRequest): + The request object. A request message for + TargetHttpProxies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetHttpProxies', + 'body': 'target_http_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(TargetHttpProxiesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListTargetHttpProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpProxyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListTargetHttpProxiesRequest): + The request object. A request message for + TargetHttpProxies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpProxyList: + A list of TargetHttpProxy resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetHttpProxies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListTargetHttpProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpProxyList() + pb_resp = compute.TargetHttpProxyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(TargetHttpProxiesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchTargetHttpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchTargetHttpProxyRequest): + The request object. A request message for + TargetHttpProxies.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}', + 'body': 'target_http_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetUrlMap(TargetHttpProxiesRestStub): + def __hash__(self): + return hash("SetUrlMap") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetUrlMapTargetHttpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set url map method over HTTP. + + Args: + request (~.compute.SetUrlMapTargetHttpProxyRequest): + The request object. A request message for + TargetHttpProxies.SetUrlMap. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/targetHttpProxies/{target_http_proxy}/setUrlMap', + 'body': 'url_map_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_url_map(request, metadata) + pb_request = compute.SetUrlMapTargetHttpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_url_map(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetHttpProxiesRequest], + compute.TargetHttpProxyAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetHttpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetTargetHttpProxyRequest], + compute.TargetHttpProxy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertTargetHttpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListTargetHttpProxiesRequest], + compute.TargetHttpProxyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchTargetHttpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_url_map(self) -> Callable[ + [compute.SetUrlMapTargetHttpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetUrlMap(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TargetHttpProxiesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/__init__.py new file mode 100644 index 000000000..18d20c1a3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import TargetHttpsProxiesClient + +__all__ = ( + 'TargetHttpsProxiesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/client.py new file mode 100644 index 000000000..fc3ebef9f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/client.py @@ -0,0 +1,2841 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.target_https_proxies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import TargetHttpsProxiesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import TargetHttpsProxiesRestTransport + + +class TargetHttpsProxiesClientMeta(type): + """Metaclass for the TargetHttpsProxies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TargetHttpsProxiesTransport]] + _transport_registry["rest"] = TargetHttpsProxiesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TargetHttpsProxiesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TargetHttpsProxiesClient(metaclass=TargetHttpsProxiesClientMeta): + """The TargetHttpsProxies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetHttpsProxiesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetHttpsProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TargetHttpsProxiesTransport: + """Returns the transport used by the client instance. + + Returns: + TargetHttpsProxiesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TargetHttpsProxiesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the target https proxies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TargetHttpsProxiesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TargetHttpsProxiesTransport): + # transport is a TargetHttpsProxiesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListTargetHttpsProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all TargetHttpsProxy resources, + regional and global, available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetHttpsProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListTargetHttpsProxiesRequest, dict]): + The request object. A request message for + TargetHttpsProxies.AggregatedList. See + the method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_https_proxies.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListTargetHttpsProxiesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListTargetHttpsProxiesRequest): + request = compute.AggregatedListTargetHttpsProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified TargetHttpsProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to delete. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_https_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetHttpsProxyRequest): + request = compute.DeleteTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified TargetHttpsProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to delete. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_https_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetHttpsProxyRequest): + request = compute.DeleteTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetHttpsProxy: + r"""Returns the specified TargetHttpsProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.Get. See the method + description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to return. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetHttpsProxy: + Represents a Target HTTPS Proxy resource. Google Compute + Engine has two Target HTTPS Proxy resources: \* + [Global](/compute/docs/reference/rest/v1/targetHttpsProxies) + \* + [Regional](/compute/docs/reference/rest/v1/regionTargetHttpsProxies) + A target HTTPS proxy is a component of GCP HTTPS load + balancers. \* targetHttpsProxies are used by external + HTTPS load balancers. \* regionTargetHttpsProxies are + used by internal HTTPS load balancers. Forwarding rules + reference a target HTTPS proxy, and the target proxy + then references a URL map. For more information, read + Using Target Proxies and Forwarding rule concepts. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.GetTargetHttpsProxyRequest): + request = compute.GetTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy_resource: Optional[compute.TargetHttpsProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a TargetHttpsProxy resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetHttpsProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + This corresponds to the ``target_https_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_https_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetHttpsProxyRequest): + request = compute.InsertTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy_resource is not None: + request.target_https_proxy_resource = target_https_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy_resource: Optional[compute.TargetHttpsProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a TargetHttpsProxy resource in the specified + project using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetHttpsProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + This corresponds to the ``target_https_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_https_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetHttpsProxyRequest): + request = compute.InsertTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy_resource is not None: + request.target_https_proxy_resource = target_https_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListTargetHttpsProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of TargetHttpsProxy resources + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetHttpsProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListTargetHttpsProxiesRequest, dict]): + The request object. A request message for + TargetHttpsProxies.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_https_proxies.pagers.ListPager: + Contains a list of TargetHttpsProxy + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListTargetHttpsProxiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListTargetHttpsProxiesRequest): + request = compute.ListTargetHttpsProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + target_https_proxy_resource: Optional[compute.TargetHttpsProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified TargetHttpsProxy resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.Patch. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to patch. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + This corresponds to the ``target_https_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, target_https_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchTargetHttpsProxyRequest): + request = compute.PatchTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if target_https_proxy_resource is not None: + request.target_https_proxy_resource = target_https_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + target_https_proxy_resource: Optional[compute.TargetHttpsProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified TargetHttpsProxy resource with + the data included in the request. This method supports + PATCH semantics and uses JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.Patch. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to patch. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + This corresponds to the ``target_https_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, target_https_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchTargetHttpsProxyRequest): + request = compute.PatchTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if target_https_proxy_resource is not None: + request.target_https_proxy_resource = target_https_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_certificate_map_unary(self, + request: Optional[Union[compute.SetCertificateMapTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + target_https_proxies_set_certificate_map_request_resource: Optional[compute.TargetHttpsProxiesSetCertificateMapRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the Certificate Map for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_certificate_map(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetCertificateMapTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_certificate_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetCertificateMapTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.SetCertificateMap. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + whose CertificateMap is to be set. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxies_set_certificate_map_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetCertificateMapRequest): + The body resource for this request + This corresponds to the ``target_https_proxies_set_certificate_map_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                An Operation resource, used to
+                track the status of the request.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, target_https_proxy, target_https_proxies_set_certificate_map_request_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.SetCertificateMapTargetHttpsProxyRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.SetCertificateMapTargetHttpsProxyRequest):
+            request = compute.SetCertificateMapTargetHttpsProxyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if target_https_proxy is not None:
+            request.target_https_proxy = target_https_proxy
+        if target_https_proxies_set_certificate_map_request_resource is not None:
+            request.target_https_proxies_set_certificate_map_request_resource = target_https_proxies_set_certificate_map_request_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.set_certificate_map]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("target_https_proxy", request.target_https_proxy),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_certificate_map(self, + request: Optional[Union[compute.SetCertificateMapTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + target_https_proxies_set_certificate_map_request_resource: Optional[compute.TargetHttpsProxiesSetCertificateMapRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the Certificate Map for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_certificate_map(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetCertificateMapTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_certificate_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetCertificateMapTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.SetCertificateMap. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + whose CertificateMap is to be set. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxies_set_certificate_map_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetCertificateMapRequest): + The body resource for this request + This corresponds to the ``target_https_proxies_set_certificate_map_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, target_https_proxies_set_certificate_map_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetCertificateMapTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetCertificateMapTargetHttpsProxyRequest): + request = compute.SetCertificateMapTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if target_https_proxies_set_certificate_map_request_resource is not None: + request.target_https_proxies_set_certificate_map_request_resource = target_https_proxies_set_certificate_map_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_certificate_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_quic_override_unary(self, + request: Optional[Union[compute.SetQuicOverrideTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + target_https_proxies_set_quic_override_request_resource: Optional[compute.TargetHttpsProxiesSetQuicOverrideRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the QUIC override policy for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_quic_override(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetQuicOverrideTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_quic_override(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetQuicOverrideTargetHttpsProxyRequest, dict]): + The request object. 
A request message for + TargetHttpsProxies.SetQuicOverride. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to set the QUIC override policy for. The + name should conform to RFC1035. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxies_set_quic_override_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetQuicOverrideRequest): + The body resource for this request + This corresponds to the ``target_https_proxies_set_quic_override_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, target_https_proxies_set_quic_override_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetQuicOverrideTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetQuicOverrideTargetHttpsProxyRequest): + request = compute.SetQuicOverrideTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if target_https_proxies_set_quic_override_request_resource is not None: + request.target_https_proxies_set_quic_override_request_resource = target_https_proxies_set_quic_override_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_quic_override] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_quic_override(self, + request: Optional[Union[compute.SetQuicOverrideTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + target_https_proxies_set_quic_override_request_resource: Optional[compute.TargetHttpsProxiesSetQuicOverrideRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the QUIC override policy for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_quic_override(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetQuicOverrideTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_quic_override(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetQuicOverrideTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.SetQuicOverride. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to set the QUIC override policy for. The + name should conform to RFC1035. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxies_set_quic_override_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetQuicOverrideRequest): + The body resource for this request + This corresponds to the ``target_https_proxies_set_quic_override_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, target_https_proxies_set_quic_override_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetQuicOverrideTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetQuicOverrideTargetHttpsProxyRequest): + request = compute.SetQuicOverrideTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if target_https_proxies_set_quic_override_request_resource is not None: + request.target_https_proxies_set_quic_override_request_resource = target_https_proxies_set_quic_override_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_quic_override] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_ssl_certificates_unary(self, + request: Optional[Union[compute.SetSslCertificatesTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + target_https_proxies_set_ssl_certificates_request_resource: Optional[compute.TargetHttpsProxiesSetSslCertificatesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Replaces SslCertificates for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_certificates(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslCertificatesTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_ssl_certificates(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslCertificatesTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.SetSslCertificates. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to set an SslCertificates resource for. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetSslCertificatesRequest): + The body resource for this request + This corresponds to the ``target_https_proxies_set_ssl_certificates_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+        Returns:
+            google.cloud.compute_v1.types.Operation:
+                An Operation resource, used to
+                track the status of the request.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project, target_https_proxy, target_https_proxies_set_ssl_certificates_request_resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a compute.SetSslCertificatesTargetHttpsProxyRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, compute.SetSslCertificatesTargetHttpsProxyRequest):
+            request = compute.SetSslCertificatesTargetHttpsProxyRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+        if target_https_proxy is not None:
+            request.target_https_proxy = target_https_proxy
+        if target_https_proxies_set_ssl_certificates_request_resource is not None:
+            request.target_https_proxies_set_ssl_certificates_request_resource = target_https_proxies_set_ssl_certificates_request_resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.set_ssl_certificates]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project", request.project),
+                ("target_https_proxy", request.target_https_proxy),
+            )),
+        )
+
+        # Send the request.
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_ssl_certificates(self, + request: Optional[Union[compute.SetSslCertificatesTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + target_https_proxies_set_ssl_certificates_request_resource: Optional[compute.TargetHttpsProxiesSetSslCertificatesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Replaces SslCertificates for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_certificates(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslCertificatesTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_ssl_certificates(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslCertificatesTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.SetSslCertificates. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + to set an SslCertificates resource for. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetSslCertificatesRequest): + The body resource for this request + This corresponds to the ``target_https_proxies_set_ssl_certificates_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, target_https_proxies_set_ssl_certificates_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSslCertificatesTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetSslCertificatesTargetHttpsProxyRequest): + request = compute.SetSslCertificatesTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if target_https_proxies_set_ssl_certificates_request_resource is not None: + request.target_https_proxies_set_ssl_certificates_request_resource = target_https_proxies_set_ssl_certificates_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_ssl_certificates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_ssl_policy_unary(self, + request: Optional[Union[compute.SetSslPolicyTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + ssl_policy_reference_resource: Optional[compute.SslPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the SSL policy for TargetHttpsProxy. The SSL + policy specifies the server-side support for SSL + features. This affects connections between clients and + the HTTPS proxy load balancer. They do not affect the + connection between the load balancer and the backends. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_policy(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslPolicyTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_ssl_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslPolicyTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.SetSslPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + whose SSL policy is to be set. The name + must be 1-63 characters long, and comply + with RFC1035. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy_reference_resource (google.cloud.compute_v1.types.SslPolicyReference): + The body resource for this request + This corresponds to the ``ssl_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, ssl_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSslPolicyTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSslPolicyTargetHttpsProxyRequest): + request = compute.SetSslPolicyTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if ssl_policy_reference_resource is not None: + request.ssl_policy_reference_resource = ssl_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_ssl_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_ssl_policy(self, + request: Optional[Union[compute.SetSslPolicyTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + ssl_policy_reference_resource: Optional[compute.SslPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the SSL policy for TargetHttpsProxy. The SSL + policy specifies the server-side support for SSL + features. This affects connections between clients and + the HTTPS proxy load balancer. They do not affect the + connection between the load balancer and the backends. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_policy(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslPolicyTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_ssl_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslPolicyTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.SetSslPolicy. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + whose SSL policy is to be set. The name + must be 1-63 characters long, and comply + with RFC1035. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy_reference_resource (google.cloud.compute_v1.types.SslPolicyReference): + The body resource for this request + This corresponds to the ``ssl_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, ssl_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSslPolicyTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSslPolicyTargetHttpsProxyRequest): + request = compute.SetSslPolicyTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if ssl_policy_reference_resource is not None: + request.ssl_policy_reference_resource = ssl_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_ssl_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_url_map_unary(self, + request: Optional[Union[compute.SetUrlMapTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + url_map_reference_resource: Optional[compute.UrlMapReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the URL map for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_url_map(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetUrlMapTargetHttpsProxyRequest, dict]): + The request object. A request message for + TargetHttpsProxies.SetUrlMap. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + whose URL map is to be set. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + This corresponds to the ``url_map_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, url_map_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetUrlMapTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetUrlMapTargetHttpsProxyRequest): + request = compute.SetUrlMapTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if url_map_reference_resource is not None: + request.url_map_reference_resource = url_map_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.set_url_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_url_map(self, + request: Optional[Union[compute.SetUrlMapTargetHttpsProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_https_proxy: Optional[str] = None, + url_map_reference_resource: Optional[compute.UrlMapReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the URL map for TargetHttpsProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_url_map(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetUrlMapTargetHttpsProxyRequest, dict]): + The request object. 
A request message for + TargetHttpsProxies.SetUrlMap. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_https_proxy (str): + Name of the TargetHttpsProxy resource + whose URL map is to be set. + + This corresponds to the ``target_https_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + This corresponds to the ``url_map_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_https_proxy, url_map_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetUrlMapTargetHttpsProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetUrlMapTargetHttpsProxyRequest): + request = compute.SetUrlMapTargetHttpsProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_https_proxy is not None: + request.target_https_proxy = target_https_proxy + if url_map_reference_resource is not None: + request.url_map_reference_resource = url_map_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_url_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_https_proxy", request.target_https_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "TargetHttpsProxiesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TargetHttpsProxiesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/pagers.py new file mode 100644 index 000000000..27e8f6ec8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
class AggregatedListPager:
    """A pager over ``aggregated_list`` responses.

    Thinly wraps an initial
    :class:`google.cloud.compute_v1.types.TargetHttpsProxyAggregatedList`
    and iterates the ``items`` map field, transparently requesting
    further pages via additional ``AggregatedList`` calls whenever a
    ``next_page_token`` is present.

    Attribute access falls through to the most recent response, so all
    the usual ``TargetHttpsProxyAggregatedList`` attributes remain
    available on the pager.
    """

    def __init__(self,
            method: Callable[..., compute.TargetHttpsProxyAggregatedList],
            request: compute.AggregatedListTargetHttpsProxiesRequest,
            response: compute.TargetHttpsProxyAggregatedList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called,
                and which instantiated this pager.
            request (google.cloud.compute_v1.types.AggregatedListTargetHttpsProxiesRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.TargetHttpsProxyAggregatedList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should
                be sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutations don't leak to the caller.
        self._request = compute.AggregatedListTargetHttpsProxiesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.TargetHttpsProxyAggregatedList]:
        # Yield the first (already-fetched) page, then keep requesting
        # while the service reports more results.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[Tuple[str, compute.TargetHttpsProxiesScopedList]]:
        # Flatten the (scope, scoped-list) entries of every page.
        for page in self.pages:
            yield from page.items.items()

    def get(self, key: str) -> Optional[compute.TargetHttpsProxiesScopedList]:
        # Lookup in the most recent page only.
        return self._response.items.get(key)

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(type(self).__name__, self._response)
class ListPager:
    """A pager over ``list`` responses.

    Thinly wraps an initial
    :class:`google.cloud.compute_v1.types.TargetHttpsProxyList` and
    iterates its ``items`` field, transparently requesting further
    pages via additional ``List`` calls whenever a ``next_page_token``
    is present.

    Attribute access falls through to the most recent response, so all
    the usual ``TargetHttpsProxyList`` attributes remain available on
    the pager.
    """

    def __init__(self,
            method: Callable[..., compute.TargetHttpsProxyList],
            request: compute.ListTargetHttpsProxiesRequest,
            response: compute.TargetHttpsProxyList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called,
                and which instantiated this pager.
            request (google.cloud.compute_v1.types.ListTargetHttpsProxiesRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.TargetHttpsProxyList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should
                be sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutations don't leak to the caller.
        self._request = compute.ListTargetHttpsProxiesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.TargetHttpsProxyList]:
        # Yield the first (already-fetched) page, then keep requesting
        # while the service reports more results.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[compute.TargetHttpsProxy]:
        # Flatten the proxies of every page.
        for page in self.pages:
            yield from page.items

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(type(self).__name__, self._response)
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TargetHttpsProxiesTransport +from .rest import TargetHttpsProxiesRestTransport +from .rest import TargetHttpsProxiesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TargetHttpsProxiesTransport]] +_transport_registry['rest'] = TargetHttpsProxiesRestTransport + +__all__ = ( + 'TargetHttpsProxiesTransport', + 'TargetHttpsProxiesRestTransport', + 'TargetHttpsProxiesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/transports/base.py new file mode 100644 index 000000000..287528437 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/transports/base.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.compute_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.compute_v1.types import compute
+from google.cloud.compute_v1.services import global_operations
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class TargetHttpsProxiesTransport(abc.ABC):
+    """Abstract transport class for TargetHttpsProxies."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/compute',
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'compute.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.set_certificate_map: gapic_v1.method.wrap_method( + self.set_certificate_map, + default_timeout=None, + client_info=client_info, + ), + self.set_quic_override: gapic_v1.method.wrap_method( + self.set_quic_override, + default_timeout=None, + client_info=client_info, + ), + self.set_ssl_certificates: gapic_v1.method.wrap_method( + self.set_ssl_certificates, + default_timeout=None, + client_info=client_info, + ), + self.set_ssl_policy: gapic_v1.method.wrap_method( + self.set_ssl_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_url_map: gapic_v1.method.wrap_method( + self.set_url_map, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetHttpsProxiesRequest], + Union[ + compute.TargetHttpsProxyAggregatedList, + Awaitable[compute.TargetHttpsProxyAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetTargetHttpsProxyRequest], + Union[ + compute.TargetHttpsProxy, + Awaitable[compute.TargetHttpsProxy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListTargetHttpsProxiesRequest], + Union[ + compute.TargetHttpsProxyList, + Awaitable[compute.TargetHttpsProxyList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_certificate_map(self) -> Callable[ + [compute.SetCertificateMapTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_quic_override(self) -> Callable[ + [compute.SetQuicOverrideTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_ssl_certificates(self) -> Callable[ + [compute.SetSslCertificatesTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_ssl_policy(self) -> Callable[ + [compute.SetSslPolicyTargetHttpsProxyRequest], + Union[ + compute.Operation, 
+ Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_url_map(self) -> Callable[ + [compute.SetUrlMapTargetHttpsProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'TargetHttpsProxiesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py new file mode 100644 index 000000000..207e2fb69 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py @@ -0,0 +1,1592 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import TargetHttpsProxiesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TargetHttpsProxiesRestInterceptor: + """Interceptor for TargetHttpsProxies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TargetHttpsProxiesRestTransport. + + .. 
code-block:: python + class MyCustomTargetHttpsProxiesInterceptor(TargetHttpsProxiesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_certificate_map(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_certificate_map(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_quic_override(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_quic_override(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_ssl_certificates(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_set_ssl_certificates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_ssl_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_ssl_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_url_map(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_url_map(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TargetHttpsProxiesRestTransport(interceptor=MyCustomTargetHttpsProxiesInterceptor()) + client = TargetHttpsProxiesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListTargetHttpsProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListTargetHttpsProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.TargetHttpsProxyAggregatedList) -> compute.TargetHttpsProxyAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. 
+ """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. + """ + return request, metadata + + def post_get(self, response: compute.TargetHttpsProxy) -> compute.TargetHttpsProxy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListTargetHttpsProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListTargetHttpsProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. 
+ """ + return request, metadata + + def post_list(self, response: compute.TargetHttpsProxyList) -> compute.TargetHttpsProxyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_set_certificate_map(self, request: compute.SetCertificateMapTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetCertificateMapTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_certificate_map + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. + """ + return request, metadata + + def post_set_certificate_map(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_certificate_map + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. 
+ """ + return response + def pre_set_quic_override(self, request: compute.SetQuicOverrideTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetQuicOverrideTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_quic_override + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. + """ + return request, metadata + + def post_set_quic_override(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_quic_override + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_set_ssl_certificates(self, request: compute.SetSslCertificatesTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetSslCertificatesTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_ssl_certificates + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. + """ + return request, metadata + + def post_set_ssl_certificates(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_ssl_certificates + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_set_ssl_policy(self, request: compute.SetSslPolicyTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetSslPolicyTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_ssl_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. 
+ """ + return request, metadata + + def post_set_ssl_policy(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_ssl_policy + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + def pre_set_url_map(self, request: compute.SetUrlMapTargetHttpsProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetUrlMapTargetHttpsProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_url_map + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetHttpsProxies server. + """ + return request, metadata + + def post_set_url_map(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_url_map + + Override in a subclass to manipulate the response + after it is returned by the TargetHttpsProxies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TargetHttpsProxiesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TargetHttpsProxiesRestInterceptor + + +class TargetHttpsProxiesRestTransport(TargetHttpsProxiesTransport): + """REST backend transport for TargetHttpsProxies. + + The TargetHttpsProxies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TargetHttpsProxiesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TargetHttpsProxiesRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AggregatedList(TargetHttpsProxiesRestStub):
+        def __hash__(self):
+            return hash("AggregatedList")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.AggregatedListTargetHttpsProxiesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.TargetHttpsProxyAggregatedList:
+            r"""Call the aggregated list method over HTTP.
+
+            Args:
+                request (~.compute.AggregatedListTargetHttpsProxiesRequest):
+                    The request object.
A request message for + TargetHttpsProxies.AggregatedList. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpsProxyAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/targetHttpsProxies', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListTargetHttpsProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpsProxyAggregatedList() + pb_resp = compute.TargetHttpsProxyAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpsProxy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpsProxy: + Represents a Target HTTPS Proxy resource. Google Compute + Engine has two Target HTTPS Proxy resources: \* + `Global `__ + \* + `Regional `__ + A target HTTPS proxy is a component of GCP HTTPS load + balancers. \* targetHttpsProxies are used by external + HTTPS load balancers. \* regionTargetHttpsProxies are + used by internal HTTPS load balancers. Forwarding rules + reference a target HTTPS proxy, and the target proxy + then references a URL map. For more information, read + Using Target Proxies and Forwarding rule concepts. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpsProxy() + pb_resp = compute.TargetHttpsProxy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetHttpsProxies', + 'body': 'target_https_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListTargetHttpsProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetHttpsProxyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListTargetHttpsProxiesRequest): + The request object. A request message for + TargetHttpsProxies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetHttpsProxyList: + Contains a list of TargetHttpsProxy + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetHttpsProxies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListTargetHttpsProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetHttpsProxyList() + pb_resp = compute.TargetHttpsProxyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.Patch. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}', + 'body': 'target_https_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _SetCertificateMap(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("SetCertificateMap") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetCertificateMapTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set certificate map method over HTTP. + + Args: + request (~.compute.SetCertificateMapTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.SetCertificateMap. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setCertificateMap', + 'body': 'target_https_proxies_set_certificate_map_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_certificate_map(request, metadata) + pb_request = compute.SetCertificateMapTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_certificate_map(resp) + return resp + + class _SetQuicOverride(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("SetQuicOverride") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetQuicOverrideTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set quic override method over HTTP. + + Args: + request (~.compute.SetQuicOverrideTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.SetQuicOverride. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setQuicOverride', + 'body': 'target_https_proxies_set_quic_override_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_quic_override(request, metadata) + pb_request = compute.SetQuicOverrideTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_quic_override(resp) + return resp + + class _SetSslCertificates(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("SetSslCertificates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetSslCertificatesTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set ssl certificates method over HTTP. + + Args: + request (~.compute.SetSslCertificatesTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.SetSslCertificates. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setSslCertificates', + 'body': 'target_https_proxies_set_ssl_certificates_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_ssl_certificates(request, metadata) + pb_request = compute.SetSslCertificatesTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_ssl_certificates(resp) + return resp + + class _SetSslPolicy(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("SetSslPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetSslPolicyTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set ssl policy method over HTTP. + + Args: + request (~.compute.SetSslPolicyTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.SetSslPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setSslPolicy', + 'body': 'ssl_policy_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_ssl_policy(request, metadata) + pb_request = compute.SetSslPolicyTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_ssl_policy(resp) + return resp + + class _SetUrlMap(TargetHttpsProxiesRestStub): + def __hash__(self): + return hash("SetUrlMap") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetUrlMapTargetHttpsProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set url map method over HTTP. + + Args: + request (~.compute.SetUrlMapTargetHttpsProxyRequest): + The request object. A request message for + TargetHttpsProxies.SetUrlMap. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setUrlMap', + 'body': 'url_map_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_url_map(request, metadata) + pb_request = compute.SetUrlMapTargetHttpsProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_url_map(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetHttpsProxiesRequest], + compute.TargetHttpsProxyAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetTargetHttpsProxyRequest], + compute.TargetHttpsProxy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListTargetHttpsProxiesRequest], + compute.TargetHttpsProxyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_certificate_map(self) -> Callable[ + [compute.SetCertificateMapTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetCertificateMap(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_quic_override(self) -> Callable[ + [compute.SetQuicOverrideTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetQuicOverride(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_ssl_certificates(self) -> Callable[ + [compute.SetSslCertificatesTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSslCertificates(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_ssl_policy(self) -> Callable[ + [compute.SetSslPolicyTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSslPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_url_map(self) -> Callable[ + [compute.SetUrlMapTargetHttpsProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetUrlMap(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TargetHttpsProxiesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/__init__.py new file mode 100644 index 000000000..7a82ff949 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TargetInstancesClient + +__all__ = ( + 'TargetInstancesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/client.py new file mode 100644 index 000000000..228c4698e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/client.py @@ -0,0 +1,1300 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.target_instances import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import TargetInstancesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import TargetInstancesRestTransport + + +class TargetInstancesClientMeta(type): + """Metaclass for the TargetInstances client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TargetInstancesTransport]] + _transport_registry["rest"] = TargetInstancesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TargetInstancesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TargetInstancesClient(metaclass=TargetInstancesClientMeta): + """The TargetInstances API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetInstancesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetInstancesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TargetInstancesTransport: + """Returns the transport used by the client instance. + + Returns: + TargetInstancesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TargetInstancesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the target instances client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TargetInstancesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TargetInstancesTransport): + # transport is a TargetInstancesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListTargetInstancesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of target instances. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetInstancesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListTargetInstancesRequest, dict]): + The request object. A request message for + TargetInstances.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_instances.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListTargetInstancesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListTargetInstancesRequest): + request = compute.AggregatedListTargetInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteTargetInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + target_instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified TargetInstance resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetInstanceRequest( + project="project_value", + target_instance="target_instance_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetInstanceRequest, dict]): + The request object. A request message for + TargetInstances.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_instance (str): + Name of the TargetInstance resource + to delete. + + This corresponds to the ``target_instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, target_instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetInstanceRequest): + request = compute.DeleteTargetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if target_instance is not None: + request.target_instance = target_instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("target_instance", request.target_instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteTargetInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + target_instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified TargetInstance resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetInstanceRequest( + project="project_value", + target_instance="target_instance_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetInstanceRequest, dict]): + The request object. A request message for + TargetInstances.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_instance (str): + Name of the TargetInstance resource + to delete. + + This corresponds to the ``target_instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, target_instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetInstanceRequest): + request = compute.DeleteTargetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if target_instance is not None: + request.target_instance = target_instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("target_instance", request.target_instance), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetTargetInstanceRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + target_instance: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetInstance: + r"""Returns the specified TargetInstance resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetInstanceRequest( + project="project_value", + target_instance="target_instance_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetTargetInstanceRequest, dict]): + The request object. A request message for + TargetInstances.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_instance (str): + Name of the TargetInstance resource + to return. + + This corresponds to the ``target_instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetInstance: + Represents a Target Instance + resource. You can use a target instance + to handle traffic for one or more + forwarding rules, which is ideal for + forwarding protocol traffic that is + managed by a single source. For example, + ESP, AH, TCP, or UDP. For more + information, read Target instances. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, target_instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetTargetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetTargetInstanceRequest): + request = compute.GetTargetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if target_instance is not None: + request.target_instance = target_instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("target_instance", request.target_instance), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
    def insert_unary(self,
            request: Optional[Union[compute.InsertTargetInstanceRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            zone: Optional[str] = None,
            target_instance_resource: Optional[compute.TargetInstance] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Creates a TargetInstance resource in the specified
        project and zone using the data included in the request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.TargetInstancesClient()

                # Initialize request argument(s)
                request = compute_v1.InsertTargetInstanceRequest(
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertTargetInstanceRequest, dict]):
                The request object. A request message for
                TargetInstances.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                Name of the zone scoping this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_instance_resource (google.cloud.compute_v1.types.TargetInstance):
                The body resource for this request.
                This corresponds to the ``target_instance_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running-operation proto produced by the insert.
                Unlike :meth:`insert`, the result is *not* wrapped in an
                extended-operation helper; callers are responsible for
                polling the returned operation themselves.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, zone, target_instance_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertTargetInstanceRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertTargetInstanceRequest):
            request = compute.InsertTargetInstanceRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if target_instance_resource is not None:
            request.target_instance_resource = target_instance_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("zone", request.zone),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_instance_resource (google.cloud.compute_v1.types.TargetInstance): + The body resource for this request + This corresponds to the ``target_instance_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, target_instance_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetInstanceRequest): + request = compute.InsertTargetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if target_instance_resource is not None: + request.target_instance_resource = target_instance_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListTargetInstancesRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of TargetInstance resources + available to the specified project and zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetInstancesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListTargetInstancesRequest, dict]): + The request object. A request message for + TargetInstances.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone scoping this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_instances.pagers.ListPager: + Contains a list of TargetInstance + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListTargetInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListTargetInstancesRequest): + request = compute.ListTargetInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TargetInstancesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TargetInstancesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/pagers.py new file mode 100644 index 000000000..32e03985d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetInstanceAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetInstanceAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetInstanceAggregatedList], + request: compute.AggregatedListTargetInstancesRequest, + response: compute.TargetInstanceAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListTargetInstancesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetInstanceAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListTargetInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetInstanceAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.TargetInstancesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.TargetInstancesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetInstanceList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetInstanceList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetInstanceList], + request: compute.ListTargetInstancesRequest, + response: compute.TargetInstanceList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListTargetInstancesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetInstanceList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListTargetInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetInstanceList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetInstance]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/transports/__init__.py new file mode 100644 index 000000000..7a2b07a97 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_instances/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import TargetInstancesTransport
from .rest import TargetInstancesRestTransport
from .rest import TargetInstancesRestInterceptor


# Compile a registry of transports.
# The registry maps a transport name to its implementation class; for this
# service only a REST transport is generated, so "rest" is the sole entry.
_transport_registry = OrderedDict()  # type: Dict[str, Type[TargetInstancesTransport]]
_transport_registry['rest'] = TargetInstancesRestTransport

# Public surface of this sub-package.
__all__ = (
    'TargetInstancesTransport',
    'TargetInstancesRestTransport',
    'TargetInstancesRestInterceptor',
)
class TargetInstancesTransport(abc.ABC):
    """Abstract transport class for TargetInstances.

    Handles credential resolution, scope configuration, host normalization,
    and retry/timeout wrapping; concrete subclasses implement the RPC
    properties declared below.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Audience applied to credentials that
                support ``with_gdch_audience``; defaults to ``host`` when
                unset. Only used when credentials are resolved from the
                environment.
            **kwargs: Accepted (and ignored here) so concrete transports can
                forward their own constructor options.
        """
        # BUGFIX: ``Any`` is not imported from ``typing`` in this module, and
        # PEP 526 evaluates attribute-annotation expressions at runtime, so
        # an unquoted ``Dict[str, Any]`` annotation raised
        # ``NameError: name 'Any' is not defined`` on every instantiation.
        # A string (forward-reference) annotation is never evaluated, which
        # keeps the type information without requiring a new import.
        self._extended_operations_services: "Dict[str, Any]" = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        """Precompute retry/timeout wrappers for every RPC exposed here."""
        self._wrapped_methods = {
            self.aggregated_list: gapic_v1.method.wrap_method(
                self.aggregated_list,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete: gapic_v1.method.wrap_method(
                self.delete,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.insert: gapic_v1.method.wrap_method(
                self.insert,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    # The properties below declare the RPC surface; each concrete transport
    # overrides them with a bound callable.

    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListTargetInstancesRequest],
            Union[
                compute.TargetInstanceAggregatedList,
                Awaitable[compute.TargetInstanceAggregatedList]
            ]]:
        raise NotImplementedError()

    @property
    def delete(self) -> Callable[
            [compute.DeleteTargetInstanceRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetTargetInstanceRequest],
            Union[
                compute.TargetInstance,
                Awaitable[compute.TargetInstance]
            ]]:
        raise NotImplementedError()

    @property
    def insert(self) -> Callable[
            [compute.InsertTargetInstanceRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListTargetInstancesRequest],
            Union[
                compute.TargetInstanceList,
                Awaitable[compute.TargetInstanceList]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport name (e.g. "rest"); supplied by subclasses.
        raise NotImplementedError()

    @property
    def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient:
        """Lazily construct (and cache) the client used to poll zone operations."""
        ex_op_service = self._extended_operations_services.get("zone_operations")
        if not ex_op_service:
            ex_op_service = zone_operations.ZoneOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["zone_operations"] = ex_op_service

        return ex_op_service
class TargetInstancesRestInterceptor:
    """Interceptor for TargetInstances.

    Interceptors are used to manipulate requests, request metadata, and responses
    in arbitrary ways.
    Example use cases include:
    * Logging
    * Verifying requests according to service or custom semantics
    * Stripping extraneous information from responses

    These use cases and more can be enabled by injecting an
    instance of a custom subclass when constructing the TargetInstancesRestTransport.

    .. code-block:: python
        class MyCustomTargetInstancesInterceptor(TargetInstancesRestInterceptor):
            def pre_aggregated_list(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_aggregated_list(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_delete(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_delete(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_get(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_get(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_insert(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_insert(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_list(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_list(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = TargetInstancesRestTransport(interceptor=MyCustomTargetInstancesInterceptor())
        client = TargetInstancesClient(transport=transport)


    """
    # Each RPC has a pre_* hook (runs before the HTTP call; must return the
    # possibly-modified (request, metadata) pair) and a post_* hook (runs
    # after a successful response; must return the response object). The
    # defaults below are pass-throughs.

    def pre_aggregated_list(self, request: compute.AggregatedListTargetInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListTargetInstancesRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for aggregated_list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the TargetInstances server.
        """
        return request, metadata

    def post_aggregated_list(self, response: compute.TargetInstanceAggregatedList) -> compute.TargetInstanceAggregatedList:
        """Post-rpc interceptor for aggregated_list

        Override in a subclass to manipulate the response
        after it is returned by the TargetInstances server but before
        it is returned to user code.
        """
        return response

    def pre_delete(self, request: compute.DeleteTargetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteTargetInstanceRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for delete

        Override in a subclass to manipulate the request or metadata
        before they are sent to the TargetInstances server.
        """
        return request, metadata

    def post_delete(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for delete

        Override in a subclass to manipulate the response
        after it is returned by the TargetInstances server but before
        it is returned to user code.
        """
        return response

    def pre_get(self, request: compute.GetTargetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetTargetInstanceRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get

        Override in a subclass to manipulate the request or metadata
        before they are sent to the TargetInstances server.
        """
        return request, metadata

    def post_get(self, response: compute.TargetInstance) -> compute.TargetInstance:
        """Post-rpc interceptor for get

        Override in a subclass to manipulate the response
        after it is returned by the TargetInstances server but before
        it is returned to user code.
        """
        return response

    def pre_insert(self, request: compute.InsertTargetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertTargetInstanceRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for insert

        Override in a subclass to manipulate the request or metadata
        before they are sent to the TargetInstances server.
        """
        return request, metadata

    def post_insert(self, response: compute.Operation) -> compute.Operation:
        """Post-rpc interceptor for insert

        Override in a subclass to manipulate the response
        after it is returned by the TargetInstances server but before
        it is returned to user code.
        """
        return response

    def pre_list(self, request: compute.ListTargetInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListTargetInstancesRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the TargetInstances server.
        """
        return request, metadata

    def post_list(self, response: compute.TargetInstanceList) -> compute.TargetInstanceList:
        """Post-rpc interceptor for list

        Override in a subclass to manipulate the response
        after it is returned by the TargetInstances server but before
        it is returned to user code.
        """
        return response
We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TargetInstancesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TargetInstancesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(TargetInstancesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListTargetInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetInstanceAggregatedList: + r"""Call the aggregated list method over HTTP. 
+ + Args: + request (~.compute.AggregatedListTargetInstancesRequest): + The request object. A request message for + TargetInstances.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetInstanceAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/targetInstances', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListTargetInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetInstanceAggregatedList() + pb_resp = compute.TargetInstanceAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(TargetInstancesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteTargetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteTargetInstanceRequest): + The request object. A request message for + TargetInstances.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteTargetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(TargetInstancesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetTargetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetInstance: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetTargetInstanceRequest): + The request object. A request message for + TargetInstances.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetInstance: + Represents a Target Instance + resource. You can use a target instance + to handle traffic for one or more + forwarding rules, which is ideal for + forwarding protocol traffic that is + managed by a single source. For example, + ESP, AH, TCP, or UDP. For more + information, read Target instances. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetTargetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetInstance() + pb_resp = compute.TargetInstance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(TargetInstancesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertTargetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertTargetInstanceRequest): + The request object. A request message for + TargetInstances.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/targetInstances', + 'body': 'target_instance_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertTargetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(TargetInstancesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListTargetInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetInstanceList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListTargetInstancesRequest): + The request object. A request message for + TargetInstances.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetInstanceList: + Contains a list of TargetInstance + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/targetInstances', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListTargetInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetInstanceList() + pb_resp = compute.TargetInstanceList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetInstancesRequest], + compute.TargetInstanceAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetTargetInstanceRequest], + compute.TargetInstance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertTargetInstanceRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListTargetInstancesRequest], + compute.TargetInstanceList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TargetInstancesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/__init__.py new file mode 100644 index 000000000..0128a969a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TargetPoolsClient + +__all__ = ( + 'TargetPoolsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/client.py new file mode 100644 index 000000000..de41b8a5a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/client.py @@ -0,0 +1,2873 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.target_pools import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import TargetPoolsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import TargetPoolsRestTransport + + +class TargetPoolsClientMeta(type): + """Metaclass for the TargetPools client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TargetPoolsTransport]] + _transport_registry["rest"] = TargetPoolsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TargetPoolsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TargetPoolsClient(metaclass=TargetPoolsClientMeta): + """The TargetPools API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetPoolsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetPoolsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TargetPoolsTransport: + """Returns the transport used by the client instance. + + Returns: + TargetPoolsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TargetPoolsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the target pools client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TargetPoolsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TargetPoolsTransport): + # transport is a TargetPoolsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=client_options.api_audience,
            )

    def add_health_check_unary(self,
            request: Optional[Union[compute.AddHealthCheckTargetPoolRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_pool: Optional[str] = None,
            target_pools_add_health_check_request_resource: Optional[compute.TargetPoolsAddHealthCheckRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Adds health check URLs to a target pool.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_health_check():
                # Create a client
                client = compute_v1.TargetPoolsClient()

                # Initialize request argument(s)
                request = compute_v1.AddHealthCheckTargetPoolRequest(
                    project="project_value",
                    region="region_value",
                    target_pool="target_pool_value",
                )

                # Make the request
                response = client.add_health_check(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddHealthCheckTargetPoolRequest, dict]):
                The request object. A request message for
                TargetPools.AddHealthCheck. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_pool (str):
                Name of the target pool to add a
                health check to.

                This corresponds to the ``target_pool`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_pools_add_health_check_request_resource (google.cloud.compute_v1.types.TargetPoolsAddHealthCheckRequest):
                The body resource for this request
                This corresponds to the ``target_pools_add_health_check_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation proto returned by the API. Unlike
                :meth:`add_health_check`, this variant returns the
                ``Operation`` message directly and does not wrap it in an
                ``ExtendedOperation`` that polls for completion.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_pool, target_pools_add_health_check_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddHealthCheckTargetPoolRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddHealthCheckTargetPoolRequest):
            request = compute.AddHealthCheckTargetPoolRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if target_pool is not None:
            request.target_pool = target_pool
        if target_pools_add_health_check_request_resource is not None:
            request.target_pools_add_health_check_request_resource = target_pools_add_health_check_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_health_check]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("target_pool", request.target_pool),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def add_health_check(self,
            request: Optional[Union[compute.AddHealthCheckTargetPoolRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_pool: Optional[str] = None,
            target_pools_add_health_check_request_resource: Optional[compute.TargetPoolsAddHealthCheckRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Adds health check URLs to a target pool.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_health_check():
                # Create a client
                client = compute_v1.TargetPoolsClient()

                # Initialize request argument(s)
                request = compute_v1.AddHealthCheckTargetPoolRequest(
                    project="project_value",
                    region="region_value",
                    target_pool="target_pool_value",
                )

                # Make the request
                response = client.add_health_check(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddHealthCheckTargetPoolRequest, dict]):
                The request object. A request message for
                TargetPools.AddHealthCheck. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the target pool to add a + health check to. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pools_add_health_check_request_resource (google.cloud.compute_v1.types.TargetPoolsAddHealthCheckRequest): + The body resource for this request + This corresponds to the ``target_pools_add_health_check_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, target_pools_add_health_check_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddHealthCheckTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddHealthCheckTargetPoolRequest): + request = compute.AddHealthCheckTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if target_pools_add_health_check_request_resource is not None: + request.target_pools_add_health_check_request_resource = target_pools_add_health_check_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_health_check] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
        return response

    def add_instance_unary(self,
            request: Optional[Union[compute.AddInstanceTargetPoolRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_pool: Optional[str] = None,
            target_pools_add_instance_request_resource: Optional[compute.TargetPoolsAddInstanceRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Adds an instance to a target pool.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_add_instance():
                # Create a client
                client = compute_v1.TargetPoolsClient()

                # Initialize request argument(s)
                request = compute_v1.AddInstanceTargetPoolRequest(
                    project="project_value",
                    region="region_value",
                    target_pool="target_pool_value",
                )

                # Make the request
                response = client.add_instance(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.AddInstanceTargetPoolRequest, dict]):
                The request object. A request message for
                TargetPools.AddInstance. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_pool (str):
                Name of the TargetPool resource to
                add instances to.

                This corresponds to the ``target_pool`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_pools_add_instance_request_resource (google.cloud.compute_v1.types.TargetPoolsAddInstanceRequest):
                The body resource for this request
                This corresponds to the ``target_pools_add_instance_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation proto returned by the API. Unlike
                :meth:`add_instance`, this variant returns the
                ``Operation`` message directly and does not wrap it in an
                ``ExtendedOperation`` that polls for completion.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_pool, target_pools_add_instance_request_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.AddInstanceTargetPoolRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AddInstanceTargetPoolRequest):
            request = compute.AddInstanceTargetPoolRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if target_pool is not None:
            request.target_pool = target_pool
        if target_pools_add_instance_request_resource is not None:
            request.target_pools_add_instance_request_resource = target_pools_add_instance_request_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.add_instance]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("target_pool", request.target_pool),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def add_instance(self,
            request: Optional[Union[compute.AddInstanceTargetPoolRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_pool: Optional[str] = None,
            target_pools_add_instance_request_resource: Optional[compute.TargetPoolsAddInstanceRequest] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Adds an instance to a target pool.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_add_instance(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.AddInstanceTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.add_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AddInstanceTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.AddInstance. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the TargetPool resource to + add instances to. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pools_add_instance_request_resource (google.cloud.compute_v1.types.TargetPoolsAddInstanceRequest): + The body resource for this request + This corresponds to the ``target_pools_add_instance_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, target_pools_add_instance_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AddInstanceTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AddInstanceTargetPoolRequest): + request = compute.AddInstanceTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if target_pools_add_instance_request_resource is not None: + request.target_pools_add_instance_request_resource = target_pools_add_instance_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListTargetPoolsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of target pools. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetPoolsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListTargetPoolsRequest, dict]): + The request object. A request message for + TargetPools.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_pools.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListTargetPoolsRequest. 
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.AggregatedListTargetPoolsRequest):
            request = compute.AggregatedListTargetPoolsRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.aggregated_list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.AggregatedListPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete_unary(self,
            request: Optional[Union[compute.DeleteTargetPoolRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_pool: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> compute.Operation:
        r"""Deletes the specified target pool.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.TargetPoolsClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteTargetPoolRequest(
                    project="project_value",
                    region="region_value",
                    target_pool="target_pool_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteTargetPoolRequest, dict]):
                The request object. A request message for
                TargetPools.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_pool (str):
                Name of the TargetPool resource to
                delete.

                This corresponds to the ``target_pool`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw operation proto returned by the API. Unlike
                :meth:`delete`, this variant returns the
                ``Operation`` message directly and does not wrap it in an
                ``ExtendedOperation`` that polls for completion.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
    def delete(self,
            request: Optional[Union[compute.DeleteTargetPoolRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_pool: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified target pool.

        Unlike ``delete_unary``, this variant wraps the returned operation
        in an ``ExtendedOperation`` that polls the regional operations
        service until completion.

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteTargetPoolRequest, dict]):
                The request object. A request message for
                TargetPools.Delete. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_pool (str):
                Name of the TargetPool resource to
                delete.

                This corresponds to the ``target_pool`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_pool])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.DeleteTargetPoolRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.DeleteTargetPoolRequest):
            request = compute.DeleteTargetPoolRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if target_pool is not None:
            request.target_pool = target_pool

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
                ("target_pool", request.target_pool),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a poller for the returned operation: the regional
        # operations service is queried by (project, region, operation name).
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetPool: + r"""Returns the specified target pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the TargetPool resource to + return. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetPool: + Represents a Target Pool resource. + Target pools are used for network + TCP/UDP load balancing. A target pool + references member instances, an + associated legacy HttpHealthCheck + resource, and, optionally, a backup + target pool. For more information, read + Using target pools. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetTargetPoolRequest): + request = compute.GetTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_health(self, + request: Optional[Union[compute.GetHealthTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + instance_reference_resource: Optional[compute.InstanceReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetPoolInstanceHealth: + r"""Gets the most recent health check results for each IP + for the instance that is referenced by the given target + pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_health(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetHealthTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.GetHealth. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the TargetPool resource to + which the queried instance belongs. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_reference_resource (google.cloud.compute_v1.types.InstanceReference): + The body resource for this request + This corresponds to the ``instance_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetPoolInstanceHealth: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, instance_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetHealthTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetHealthTargetPoolRequest): + request = compute.GetHealthTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if instance_reference_resource is not None: + request.instance_reference_resource = instance_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_health] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool_resource: Optional[compute.TargetPool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a target pool in the specified project and + region using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetPoolRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool_resource (google.cloud.compute_v1.types.TargetPool): + The body resource for this request + This corresponds to the ``target_pool_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
    def insert(self,
            request: Optional[Union[compute.InsertTargetPoolRequest, dict]] = None,
            *,
            project: Optional[str] = None,
            region: Optional[str] = None,
            target_pool_resource: Optional[compute.TargetPool] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> extended_operation.ExtendedOperation:
        r"""Creates a target pool in the specified project and
        region using the data included in the request.

        Unlike ``insert_unary``, this variant wraps the returned operation
        in an ``ExtendedOperation`` that polls the regional operations
        service until completion.

        Args:
            request (Union[google.cloud.compute_v1.types.InsertTargetPoolRequest, dict]):
                The request object. A request message for
                TargetPools.Insert. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region scoping this
                request.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            target_pool_resource (google.cloud.compute_v1.types.TargetPool):
                The body resource for this request
                This corresponds to the ``target_pool_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing a extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([project, region, target_pool_resource])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # Minor optimization to avoid making a copy if the user passes
        # in a compute.InsertTargetPoolRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, compute.InsertTargetPoolRequest):
            request = compute.InsertTargetPoolRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if target_pool_resource is not None:
            request.target_pool_resource = target_pool_resource

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("project", request.project),
                ("region", request.region),
            )),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Build a poller for the returned operation: the regional
        # operations service is queried by (project, region, operation name).
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListTargetPoolsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of target pools available to the + specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetPoolsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListTargetPoolsRequest, dict]): + The request object. A request message for + TargetPools.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_pools.pagers.ListPager: + Contains a list of TargetPool + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListTargetPoolsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListTargetPoolsRequest): + request = compute.ListTargetPoolsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def remove_health_check_unary(self, + request: Optional[Union[compute.RemoveHealthCheckTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + target_pools_remove_health_check_request_resource: Optional[compute.TargetPoolsRemoveHealthCheckRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes health check URL from a target pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_health_check(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveHealthCheckTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.remove_health_check(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveHealthCheckTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.RemoveHealthCheck. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. 
+ This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the target pool to remove + health checks from. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pools_remove_health_check_request_resource (google.cloud.compute_v1.types.TargetPoolsRemoveHealthCheckRequest): + The body resource for this request + This corresponds to the ``target_pools_remove_health_check_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, target_pools_remove_health_check_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveHealthCheckTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveHealthCheckTargetPoolRequest): + request = compute.RemoveHealthCheckTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if target_pools_remove_health_check_request_resource is not None: + request.target_pools_remove_health_check_request_resource = target_pools_remove_health_check_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_health_check] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def remove_health_check(self, + request: Optional[Union[compute.RemoveHealthCheckTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + target_pools_remove_health_check_request_resource: Optional[compute.TargetPoolsRemoveHealthCheckRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes health check URL from a target pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_health_check(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveHealthCheckTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.remove_health_check(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveHealthCheckTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.RemoveHealthCheck. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the target pool to remove + health checks from. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pools_remove_health_check_request_resource (google.cloud.compute_v1.types.TargetPoolsRemoveHealthCheckRequest): + The body resource for this request + This corresponds to the ``target_pools_remove_health_check_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, target_pools_remove_health_check_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveHealthCheckTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveHealthCheckTargetPoolRequest): + request = compute.RemoveHealthCheckTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if target_pools_remove_health_check_request_resource is not None: + request.target_pools_remove_health_check_request_resource = target_pools_remove_health_check_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_health_check] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def remove_instance_unary(self, + request: Optional[Union[compute.RemoveInstanceTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + target_pools_remove_instance_request_resource: Optional[compute.TargetPoolsRemoveInstanceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Removes instance URL from a target pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_instance(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveInstanceTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.remove_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveInstanceTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.RemoveInstance. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the TargetPool resource to + remove instances from. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pools_remove_instance_request_resource (google.cloud.compute_v1.types.TargetPoolsRemoveInstanceRequest): + The body resource for this request + This corresponds to the ``target_pools_remove_instance_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, target_pools_remove_instance_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveInstanceTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveInstanceTargetPoolRequest): + request = compute.RemoveInstanceTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if target_pools_remove_instance_request_resource is not None: + request.target_pools_remove_instance_request_resource = target_pools_remove_instance_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def remove_instance(self, + request: Optional[Union[compute.RemoveInstanceTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + target_pools_remove_instance_request_resource: Optional[compute.TargetPoolsRemoveInstanceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Removes instance URL from a target pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_remove_instance(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveInstanceTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.remove_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.RemoveInstanceTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.RemoveInstance. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the TargetPool resource to + remove instances from. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pools_remove_instance_request_resource (google.cloud.compute_v1.types.TargetPoolsRemoveInstanceRequest): + The body resource for this request + This corresponds to the ``target_pools_remove_instance_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, target_pools_remove_instance_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.RemoveInstanceTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.RemoveInstanceTargetPoolRequest): + request = compute.RemoveInstanceTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if target_pools_remove_instance_request_resource is not None: + request.target_pools_remove_instance_request_resource = target_pools_remove_instance_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_backup_unary(self, + request: Optional[Union[compute.SetBackupTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + target_reference_resource: Optional[compute.TargetReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes a backup target pool's configurations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_backup(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.SetBackupTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.set_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetBackupTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.SetBackup. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ target_pool (str): + Name of the TargetPool resource to + set a backup pool for. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + This corresponds to the ``target_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, target_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetBackupTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetBackupTargetPoolRequest): + request = compute.SetBackupTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if target_reference_resource is not None: + request.target_reference_resource = target_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_backup(self, + request: Optional[Union[compute.SetBackupTargetPoolRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_pool: Optional[str] = None, + target_reference_resource: Optional[compute.TargetReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes a backup target pool's configurations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_backup(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.SetBackupTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.set_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetBackupTargetPoolRequest, dict]): + The request object. A request message for + TargetPools.SetBackup. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_pool (str): + Name of the TargetPool resource to + set a backup pool for. + + This corresponds to the ``target_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + This corresponds to the ``target_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_pool, target_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetBackupTargetPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetBackupTargetPoolRequest): + request = compute.SetBackupTargetPoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_pool is not None: + request.target_pool = target_pool + if target_reference_resource is not None: + request.target_reference_resource = target_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_pool", request.target_pool), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "TargetPoolsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TargetPoolsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/pagers.py new file mode 100644 index 000000000..ab91e1433 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetPoolAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetPoolAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.TargetPoolAggregatedList], + request: compute.AggregatedListTargetPoolsRequest, + response: compute.TargetPoolAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListTargetPoolsRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetPoolAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListTargetPoolsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetPoolAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.TargetPoolsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.TargetPoolsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetPoolList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetPoolList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetPoolList], + request: compute.ListTargetPoolsRequest, + response: compute.TargetPoolList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListTargetPoolsRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetPoolList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListTargetPoolsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetPoolList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetPool]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/__init__.py new file mode 100644 index 000000000..4efb85f97 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TargetPoolsTransport +from .rest import TargetPoolsRestTransport +from .rest import TargetPoolsRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[TargetPoolsTransport]] +_transport_registry['rest'] = TargetPoolsRestTransport + +__all__ = ( + 'TargetPoolsTransport', + 'TargetPoolsRestTransport', + 'TargetPoolsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/base.py new file mode 100644 index 000000000..4696509fe --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/base.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class TargetPoolsTransport(abc.ABC): + """Abstract transport class for TargetPools.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.add_health_check: gapic_v1.method.wrap_method( + self.add_health_check, + default_timeout=None, + client_info=client_info, + ), + self.add_instance: gapic_v1.method.wrap_method( + self.add_instance, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_health: gapic_v1.method.wrap_method( + self.get_health, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.remove_health_check: gapic_v1.method.wrap_method( + self.remove_health_check, + default_timeout=None, + client_info=client_info, + ), + self.remove_instance: gapic_v1.method.wrap_method( + self.remove_instance, + default_timeout=None, + client_info=client_info, + ), + self.set_backup: gapic_v1.method.wrap_method( + self.set_backup, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def add_health_check(self) -> Callable[ + [compute.AddHealthCheckTargetPoolRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def add_instance(self) -> Callable[ + [compute.AddInstanceTargetPoolRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetPoolsRequest], + Union[ + compute.TargetPoolAggregatedList, + Awaitable[compute.TargetPoolAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetPoolRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetTargetPoolRequest], + Union[ + compute.TargetPool, + Awaitable[compute.TargetPool] + ]]: + raise NotImplementedError() + + @property + def get_health(self) -> Callable[ + [compute.GetHealthTargetPoolRequest], + Union[ + compute.TargetPoolInstanceHealth, + Awaitable[compute.TargetPoolInstanceHealth] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertTargetPoolRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListTargetPoolsRequest], + Union[ + compute.TargetPoolList, + Awaitable[compute.TargetPoolList] + ]]: + raise NotImplementedError() + + @property + def remove_health_check(self) -> Callable[ + [compute.RemoveHealthCheckTargetPoolRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def remove_instance(self) -> Callable[ + [compute.RemoveInstanceTargetPoolRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + 
@property + def set_backup(self) -> Callable[ + [compute.SetBackupTargetPoolRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'TargetPoolsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/rest.py new file mode 100644 index 000000000..0856aae91 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_pools/transports/rest.py @@ -0,0 +1,1573 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import TargetPoolsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TargetPoolsRestInterceptor: + """Interceptor for TargetPools. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TargetPoolsRestTransport. + + .. 
code-block:: python + class MyCustomTargetPoolsInterceptor(TargetPoolsRestInterceptor): + def pre_add_health_check(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_health_check(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_add_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_health(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_health(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_health_check(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_remove_health_check(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_remove_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_remove_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_backup(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TargetPoolsRestTransport(interceptor=MyCustomTargetPoolsInterceptor()) + client = TargetPoolsClient(transport=transport) + + + """ + def pre_add_health_check(self, request: compute.AddHealthCheckTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddHealthCheckTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_health_check + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_add_health_check(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_health_check + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + def pre_add_instance(self, request: compute.AddInstanceTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AddInstanceTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for add_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. 
+ """ + return request, metadata + + def post_add_instance(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for add_instance + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + def pre_aggregated_list(self, request: compute.AggregatedListTargetPoolsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListTargetPoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.TargetPoolAggregatedList) -> compute.TargetPoolAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_get(self, response: compute.TargetPool) -> compute.TargetPool: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + def pre_get_health(self, request: compute.GetHealthTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetHealthTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_health + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_get_health(self, response: compute.TargetPoolInstanceHealth) -> compute.TargetPoolInstanceHealth: + """Post-rpc interceptor for get_health + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. 
+ """ + return response + def pre_list(self, request: compute.ListTargetPoolsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListTargetPoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_list(self, response: compute.TargetPoolList) -> compute.TargetPoolList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + def pre_remove_health_check(self, request: compute.RemoveHealthCheckTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveHealthCheckTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_health_check + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_remove_health_check(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_health_check + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + def pre_remove_instance(self, request: compute.RemoveInstanceTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.RemoveInstanceTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for remove_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. 
+ """ + return request, metadata + + def post_remove_instance(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for remove_instance + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + def pre_set_backup(self, request: compute.SetBackupTargetPoolRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetBackupTargetPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetPools server. + """ + return request, metadata + + def post_set_backup(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_backup + + Override in a subclass to manipulate the response + after it is returned by the TargetPools server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TargetPoolsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TargetPoolsRestInterceptor + + +class TargetPoolsRestTransport(TargetPoolsTransport): + """REST backend transport for TargetPools. + + The TargetPools API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TargetPoolsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TargetPoolsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AddHealthCheck(TargetPoolsRestStub):
+        def __hash__(self):
+            return hash("AddHealthCheck")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.AddHealthCheckTargetPoolRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the add health check method over HTTP.
+
+            Args:
+                request (~.compute.AddHealthCheckTargetPoolRequest):
+                    The request object. A request message for
+                    TargetPools.AddHealthCheck. 
See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addHealthCheck', + 'body': 'target_pools_add_health_check_request_resource', + }, + ] + request, metadata = self._interceptor.pre_add_health_check(request, metadata) + pb_request = compute.AddHealthCheckTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + 
response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_health_check(resp) + return resp + + class _AddInstance(TargetPoolsRestStub): + def __hash__(self): + return hash("AddInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AddInstanceTargetPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the add instance method over HTTP. + + Args: + request (~.compute.AddInstanceTargetPoolRequest): + The request object. A request message for + TargetPools.AddInstance. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. 
Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addInstance', + 'body': 'target_pools_add_instance_request_resource', + }, + ] + request, metadata = self._interceptor.pre_add_instance(request, metadata) + pb_request = compute.AddInstanceTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_add_instance(resp) + return resp + + class _AggregatedList(TargetPoolsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListTargetPoolsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetPoolAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListTargetPoolsRequest): + The request object. A request message for + TargetPools.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TargetPoolAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/targetPools', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListTargetPoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetPoolAggregatedList() + pb_resp = compute.TargetPoolAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(TargetPoolsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteTargetPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteTargetPoolRequest): + The request object. A request message for + TargetPools.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(TargetPoolsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetTargetPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetPool: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetTargetPoolRequest): + The request object. A request message for + TargetPools.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetPool: + Represents a Target Pool resource. + Target pools are used for network + TCP/UDP load balancing. A target pool + references member instances, an + associated legacy HttpHealthCheck + resource, and, optionally, a backup + target pool. For more information, read + Using target pools. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetPool() + pb_resp = compute.TargetPool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetHealth(TargetPoolsRestStub): + def __hash__(self): + return hash("GetHealth") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetHealthTargetPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetPoolInstanceHealth: + r"""Call the get health method over HTTP. + + Args: + request (~.compute.GetHealthTargetPoolRequest): + The request object. A request message for + TargetPools.GetHealth. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TargetPoolInstanceHealth: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/getHealth', + 'body': 'instance_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_get_health(request, metadata) + pb_request = compute.GetHealthTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetPoolInstanceHealth() + pb_resp = compute.TargetPoolInstanceHealth.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_health(resp) + return resp + + class _Insert(TargetPoolsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertTargetPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertTargetPoolRequest): + The request object. A request message for + TargetPools.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools', + 'body': 'target_pool_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(TargetPoolsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListTargetPoolsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetPoolList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListTargetPoolsRequest): + The request object. A request message for + TargetPools.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetPoolList: + Contains a list of TargetPool + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListTargetPoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetPoolList() + pb_resp = compute.TargetPoolList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _RemoveHealthCheck(TargetPoolsRestStub): + def __hash__(self): + return hash("RemoveHealthCheck") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveHealthCheckTargetPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove health check method over HTTP. + + Args: + request (~.compute.RemoveHealthCheckTargetPoolRequest): + The request object. A request message for + TargetPools.RemoveHealthCheck. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeHealthCheck', + 'body': 'target_pools_remove_health_check_request_resource', + }, + ] + request, metadata = self._interceptor.pre_remove_health_check(request, metadata) + pb_request = compute.RemoveHealthCheckTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_health_check(resp) + return resp + + class _RemoveInstance(TargetPoolsRestStub): + def __hash__(self): + return hash("RemoveInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.RemoveInstanceTargetPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the remove instance method over HTTP. + + Args: + request (~.compute.RemoveInstanceTargetPoolRequest): + The request object. A request message for + TargetPools.RemoveInstance. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeInstance', + 'body': 'target_pools_remove_instance_request_resource', + }, + ] + request, metadata = self._interceptor.pre_remove_instance(request, metadata) + pb_request = compute.RemoveInstanceTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_remove_instance(resp) + return resp + + class _SetBackup(TargetPoolsRestStub): + def __hash__(self): + return hash("SetBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetBackupTargetPoolRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set backup method over HTTP. + + Args: + request (~.compute.SetBackupTargetPoolRequest): + The request object. A request message for + TargetPools.SetBackup. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setBackup', + 'body': 'target_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_backup(request, metadata) + pb_request = compute.SetBackupTargetPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_backup(resp) + return resp + + @property + def add_health_check(self) -> Callable[ + [compute.AddHealthCheckTargetPoolRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AddHealthCheck(self._session, self._host, self._interceptor) # type: ignore + + @property + def add_instance(self) -> Callable[ + [compute.AddInstanceTargetPoolRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetPoolsRequest], + compute.TargetPoolAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetPoolRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetTargetPoolRequest], + compute.TargetPool]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_health(self) -> Callable[ + [compute.GetHealthTargetPoolRequest], + compute.TargetPoolInstanceHealth]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetHealth(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertTargetPoolRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListTargetPoolsRequest], + compute.TargetPoolList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_health_check(self) -> Callable[ + [compute.RemoveHealthCheckTargetPoolRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveHealthCheck(self._session, self._host, self._interceptor) # type: ignore + + @property + def remove_instance(self) -> Callable[ + [compute.RemoveInstanceTargetPoolRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RemoveInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_backup(self) -> Callable[ + [compute.SetBackupTargetPoolRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TargetPoolsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/__init__.py new file mode 100644 index 000000000..c668f593a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TargetSslProxiesClient + +__all__ = ( + 'TargetSslProxiesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/client.py new file mode 100644 index 000000000..2d1b4891e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/client.py @@ -0,0 +1,2455 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.target_ssl_proxies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import TargetSslProxiesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import TargetSslProxiesRestTransport + + +class TargetSslProxiesClientMeta(type): + """Metaclass for the TargetSslProxies client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TargetSslProxiesTransport]] + _transport_registry["rest"] = TargetSslProxiesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TargetSslProxiesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TargetSslProxiesClient(metaclass=TargetSslProxiesClientMeta): + """The TargetSslProxies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetSslProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetSslProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TargetSslProxiesTransport: + """Returns the transport used by the client instance. + + Returns: + TargetSslProxiesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TargetSslProxiesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the target ssl proxies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TargetSslProxiesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TargetSslProxiesTransport): + # transport is a TargetSslProxiesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete_unary(self, + request: Optional[Union[compute.DeleteTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified TargetSslProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + to delete. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_ssl_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetSslProxyRequest): + request = compute.DeleteTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified TargetSslProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + to delete. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_ssl_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetSslProxyRequest): + request = compute.DeleteTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetSslProxy: + r"""Returns the specified TargetSslProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ target_ssl_proxy (str): + Name of the TargetSslProxy resource + to return. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetSslProxy: + Represents a Target SSL Proxy + resource. A target SSL proxy is a + component of a SSL Proxy load balancer. + Global forwarding rules reference a + target SSL proxy, and the target proxy + then references an external backend + service. For more information, read + Using Target Proxies. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetTargetSslProxyRequest): + request = compute.GetTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy_resource: Optional[compute.TargetSslProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a TargetSslProxy resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetSslProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.Insert. See the method + description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy_resource (google.cloud.compute_v1.types.TargetSslProxy): + The body resource for this request + This corresponds to the ``target_ssl_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetSslProxyRequest): + request = compute.InsertTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy_resource is not None: + request.target_ssl_proxy_resource = target_ssl_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy_resource: Optional[compute.TargetSslProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a TargetSslProxy resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetSslProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy_resource (google.cloud.compute_v1.types.TargetSslProxy): + The body resource for this request + This corresponds to the ``target_ssl_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetSslProxyRequest): + request = compute.InsertTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy_resource is not None: + request.target_ssl_proxy_resource = target_ssl_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListTargetSslProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of TargetSslProxy resources + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetSslProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListTargetSslProxiesRequest, dict]): + The request object. A request message for + TargetSslProxies.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_ssl_proxies.pagers.ListPager: + Contains a list of TargetSslProxy + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListTargetSslProxiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListTargetSslProxiesRequest): + request = compute.ListTargetSslProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_backend_service_unary(self, + request: Optional[Union[compute.SetBackendServiceTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + target_ssl_proxies_set_backend_service_request_resource: Optional[compute.TargetSslProxiesSetBackendServiceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the BackendService for TargetSslProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_backend_service(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetBackendServiceTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_backend_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetBackendServiceTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetBackendService. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose BackendService resource is to be + set. 
+ + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxies_set_backend_service_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetBackendServiceRequest): + The body resource for this request + This corresponds to the ``target_ssl_proxies_set_backend_service_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, target_ssl_proxies_set_backend_service_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetBackendServiceTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetBackendServiceTargetSslProxyRequest): + request = compute.SetBackendServiceTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if target_ssl_proxies_set_backend_service_request_resource is not None: + request.target_ssl_proxies_set_backend_service_request_resource = target_ssl_proxies_set_backend_service_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_backend_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_backend_service(self, + request: Optional[Union[compute.SetBackendServiceTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + target_ssl_proxies_set_backend_service_request_resource: Optional[compute.TargetSslProxiesSetBackendServiceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the BackendService for TargetSslProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_backend_service(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetBackendServiceTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_backend_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetBackendServiceTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetBackendService. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose BackendService resource is to be + set. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxies_set_backend_service_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetBackendServiceRequest): + The body resource for this request + This corresponds to the ``target_ssl_proxies_set_backend_service_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, target_ssl_proxies_set_backend_service_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetBackendServiceTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetBackendServiceTargetSslProxyRequest): + request = compute.SetBackendServiceTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if target_ssl_proxies_set_backend_service_request_resource is not None: + request.target_ssl_proxies_set_backend_service_request_resource = target_ssl_proxies_set_backend_service_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_backend_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_certificate_map_unary(self, + request: Optional[Union[compute.SetCertificateMapTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + target_ssl_proxies_set_certificate_map_request_resource: Optional[compute.TargetSslProxiesSetCertificateMapRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the Certificate Map for TargetSslProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_certificate_map(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetCertificateMapTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_certificate_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetCertificateMapTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetCertificateMap. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose CertificateMap is to be set. The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxies_set_certificate_map_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetCertificateMapRequest): + The body resource for this request + This corresponds to the ``target_ssl_proxies_set_certificate_map_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, target_ssl_proxies_set_certificate_map_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetCertificateMapTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetCertificateMapTargetSslProxyRequest): + request = compute.SetCertificateMapTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if target_ssl_proxies_set_certificate_map_request_resource is not None: + request.target_ssl_proxies_set_certificate_map_request_resource = target_ssl_proxies_set_certificate_map_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_certificate_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_certificate_map(self, + request: Optional[Union[compute.SetCertificateMapTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + target_ssl_proxies_set_certificate_map_request_resource: Optional[compute.TargetSslProxiesSetCertificateMapRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the Certificate Map for TargetSslProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_certificate_map(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetCertificateMapTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_certificate_map(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetCertificateMapTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetCertificateMap. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose CertificateMap is to be set. 
The + name must be 1-63 characters long, and + comply with RFC1035. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxies_set_certificate_map_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetCertificateMapRequest): + The body resource for this request + This corresponds to the ``target_ssl_proxies_set_certificate_map_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, target_ssl_proxies_set_certificate_map_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetCertificateMapTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetCertificateMapTargetSslProxyRequest): + request = compute.SetCertificateMapTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if target_ssl_proxies_set_certificate_map_request_resource is not None: + request.target_ssl_proxies_set_certificate_map_request_resource = target_ssl_proxies_set_certificate_map_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_certificate_map] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_proxy_header_unary(self, + request: Optional[Union[compute.SetProxyHeaderTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + target_ssl_proxies_set_proxy_header_request_resource: Optional[compute.TargetSslProxiesSetProxyHeaderRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the ProxyHeaderType for TargetSslProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_proxy_header(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetProxyHeaderTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_proxy_header(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetProxyHeaderTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetProxyHeader. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose ProxyHeader is to be set. 
+ + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxies_set_proxy_header_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetProxyHeaderRequest): + The body resource for this request + This corresponds to the ``target_ssl_proxies_set_proxy_header_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, target_ssl_proxies_set_proxy_header_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetProxyHeaderTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetProxyHeaderTargetSslProxyRequest): + request = compute.SetProxyHeaderTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if target_ssl_proxies_set_proxy_header_request_resource is not None: + request.target_ssl_proxies_set_proxy_header_request_resource = target_ssl_proxies_set_proxy_header_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_proxy_header] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_proxy_header(self, + request: Optional[Union[compute.SetProxyHeaderTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + target_ssl_proxies_set_proxy_header_request_resource: Optional[compute.TargetSslProxiesSetProxyHeaderRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the ProxyHeaderType for TargetSslProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_proxy_header(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetProxyHeaderTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_proxy_header(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetProxyHeaderTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetProxyHeader. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose ProxyHeader is to be set. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxies_set_proxy_header_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetProxyHeaderRequest): + The body resource for this request + This corresponds to the ``target_ssl_proxies_set_proxy_header_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, target_ssl_proxies_set_proxy_header_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetProxyHeaderTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetProxyHeaderTargetSslProxyRequest): + request = compute.SetProxyHeaderTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if target_ssl_proxies_set_proxy_header_request_resource is not None: + request.target_ssl_proxies_set_proxy_header_request_resource = target_ssl_proxies_set_proxy_header_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_proxy_header] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_ssl_certificates_unary(self, + request: Optional[Union[compute.SetSslCertificatesTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + target_ssl_proxies_set_ssl_certificates_request_resource: Optional[compute.TargetSslProxiesSetSslCertificatesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes SslCertificates for TargetSslProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_certificates(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslCertificatesTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_ssl_certificates(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslCertificatesTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetSslCertificates. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose SslCertificate resource is to be + set. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetSslCertificatesRequest): + The body resource for this request + This corresponds to the ``target_ssl_proxies_set_ssl_certificates_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, target_ssl_proxies_set_ssl_certificates_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSslCertificatesTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSslCertificatesTargetSslProxyRequest): + request = compute.SetSslCertificatesTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if target_ssl_proxies_set_ssl_certificates_request_resource is not None: + request.target_ssl_proxies_set_ssl_certificates_request_resource = target_ssl_proxies_set_ssl_certificates_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_ssl_certificates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_ssl_certificates(self, + request: Optional[Union[compute.SetSslCertificatesTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + target_ssl_proxies_set_ssl_certificates_request_resource: Optional[compute.TargetSslProxiesSetSslCertificatesRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes SslCertificates for TargetSslProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_certificates(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslCertificatesTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_ssl_certificates(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslCertificatesTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetSslCertificates. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose SslCertificate resource is to be + set. 
+ + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetSslCertificatesRequest): + The body resource for this request + This corresponds to the ``target_ssl_proxies_set_ssl_certificates_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, target_ssl_proxies_set_ssl_certificates_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSslCertificatesTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSslCertificatesTargetSslProxyRequest): + request = compute.SetSslCertificatesTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if target_ssl_proxies_set_ssl_certificates_request_resource is not None: + request.target_ssl_proxies_set_ssl_certificates_request_resource = target_ssl_proxies_set_ssl_certificates_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_ssl_certificates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def set_ssl_policy_unary(self, + request: Optional[Union[compute.SetSslPolicyTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + ssl_policy_reference_resource: Optional[compute.SslPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the SSL policy for TargetSslProxy. The SSL + policy specifies the server-side support for SSL + features. This affects connections between clients and + the SSL proxy load balancer. They do not affect the + connection between the load balancer and the backends. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_policy(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslPolicyTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_ssl_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslPolicyTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetSslPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose SSL policy is to be set. The name + must be 1-63 characters long, and comply + with RFC1035. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy_reference_resource (google.cloud.compute_v1.types.SslPolicyReference): + The body resource for this request + This corresponds to the ``ssl_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, ssl_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSslPolicyTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSslPolicyTargetSslProxyRequest): + request = compute.SetSslPolicyTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if ssl_policy_reference_resource is not None: + request.ssl_policy_reference_resource = ssl_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_ssl_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_ssl_policy(self, + request: Optional[Union[compute.SetSslPolicyTargetSslProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_ssl_proxy: Optional[str] = None, + ssl_policy_reference_resource: Optional[compute.SslPolicyReference] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the SSL policy for TargetSslProxy. The SSL + policy specifies the server-side support for SSL + features. This affects connections between clients and + the SSL proxy load balancer. They do not affect the + connection between the load balancer and the backends. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_ssl_policy(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslPolicyTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_ssl_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetSslPolicyTargetSslProxyRequest, dict]): + The request object. A request message for + TargetSslProxies.SetSslPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_ssl_proxy (str): + Name of the TargetSslProxy resource + whose SSL policy is to be set. The name + must be 1-63 characters long, and comply + with RFC1035. + + This corresponds to the ``target_ssl_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssl_policy_reference_resource (google.cloud.compute_v1.types.SslPolicyReference): + The body resource for this request + This corresponds to the ``ssl_policy_reference_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_ssl_proxy, ssl_policy_reference_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetSslPolicyTargetSslProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetSslPolicyTargetSslProxyRequest): + request = compute.SetSslPolicyTargetSslProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_ssl_proxy is not None: + request.target_ssl_proxy = target_ssl_proxy + if ssl_policy_reference_resource is not None: + request.ssl_policy_reference_resource = ssl_policy_reference_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_ssl_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_ssl_proxy", request.target_ssl_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "TargetSslProxiesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TargetSslProxiesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/pagers.py new file mode 100644 index 000000000..660467ac4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetSslProxyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetSslProxyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.TargetSslProxyList], + request: compute.ListTargetSslProxiesRequest, + response: compute.TargetSslProxyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListTargetSslProxiesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetSslProxyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListTargetSslProxiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetSslProxyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetSslProxy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/__init__.py new file mode 100644 index 000000000..10877c0c9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TargetSslProxiesTransport +from .rest import TargetSslProxiesRestTransport +from .rest import TargetSslProxiesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TargetSslProxiesTransport]] +_transport_registry['rest'] = TargetSslProxiesRestTransport + +__all__ = ( + 'TargetSslProxiesTransport', + 'TargetSslProxiesRestTransport', + 'TargetSslProxiesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/base.py new file mode 100644 index 000000000..72e45485c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class TargetSslProxiesTransport(abc.ABC): + """Abstract transport class for TargetSslProxies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_backend_service: gapic_v1.method.wrap_method( + self.set_backend_service, + default_timeout=None, + client_info=client_info, + ), + self.set_certificate_map: gapic_v1.method.wrap_method( + self.set_certificate_map, + default_timeout=None, + client_info=client_info, + ), + self.set_proxy_header: gapic_v1.method.wrap_method( + self.set_proxy_header, + default_timeout=None, + client_info=client_info, + ), + self.set_ssl_certificates: gapic_v1.method.wrap_method( + self.set_ssl_certificates, + default_timeout=None, + client_info=client_info, + ), + self.set_ssl_policy: gapic_v1.method.wrap_method( + self.set_ssl_policy, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetSslProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetTargetSslProxyRequest], + Union[ + compute.TargetSslProxy, + Awaitable[compute.TargetSslProxy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertTargetSslProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListTargetSslProxiesRequest], + Union[ + compute.TargetSslProxyList, + Awaitable[compute.TargetSslProxyList] + ]]: + raise NotImplementedError() + + @property + def set_backend_service(self) -> Callable[ + [compute.SetBackendServiceTargetSslProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_certificate_map(self) -> Callable[ + [compute.SetCertificateMapTargetSslProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_proxy_header(self) -> Callable[ + [compute.SetProxyHeaderTargetSslProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_ssl_certificates(self) -> Callable[ + [compute.SetSslCertificatesTargetSslProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_ssl_policy(self) -> Callable[ + [compute.SetSslPolicyTargetSslProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service 
= self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'TargetSslProxiesTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py new file mode 100644 index 000000000..42ba3259d --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py @@ -0,0 +1,1344 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import TargetSslProxiesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TargetSslProxiesRestInterceptor: + """Interceptor for TargetSslProxies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TargetSslProxiesRestTransport. + + .. 
code-block:: python + class MyCustomTargetSslProxiesInterceptor(TargetSslProxiesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_backend_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_backend_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_certificate_map(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_certificate_map(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_proxy_header(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_proxy_header(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_ssl_certificates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_ssl_certificates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_ssl_policy(self, request, 
metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_ssl_policy(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TargetSslProxiesRestTransport(interceptor=MyCustomTargetSslProxiesInterceptor()) + client = TargetSslProxiesClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteTargetSslProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteTargetSslProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetTargetSslProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetTargetSslProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. + """ + return request, metadata + + def post_get(self, response: compute.TargetSslProxy) -> compute.TargetSslProxy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertTargetSslProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertTargetSslProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. 
+ """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListTargetSslProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListTargetSslProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. + """ + return request, metadata + + def post_list(self, response: compute.TargetSslProxyList) -> compute.TargetSslProxyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. + """ + return response + def pre_set_backend_service(self, request: compute.SetBackendServiceTargetSslProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetBackendServiceTargetSslProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_backend_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. + """ + return request, metadata + + def post_set_backend_service(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_backend_service + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. 
+ """ + return response + def pre_set_certificate_map(self, request: compute.SetCertificateMapTargetSslProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetCertificateMapTargetSslProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_certificate_map + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. + """ + return request, metadata + + def post_set_certificate_map(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_certificate_map + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. + """ + return response + def pre_set_proxy_header(self, request: compute.SetProxyHeaderTargetSslProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetProxyHeaderTargetSslProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_proxy_header + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. + """ + return request, metadata + + def post_set_proxy_header(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_proxy_header + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. + """ + return response + def pre_set_ssl_certificates(self, request: compute.SetSslCertificatesTargetSslProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetSslCertificatesTargetSslProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_ssl_certificates + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. 
+ """ + return request, metadata + + def post_set_ssl_certificates(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_ssl_certificates + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. + """ + return response + def pre_set_ssl_policy(self, request: compute.SetSslPolicyTargetSslProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetSslPolicyTargetSslProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_ssl_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetSslProxies server. + """ + return request, metadata + + def post_set_ssl_policy(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_ssl_policy + + Override in a subclass to manipulate the response + after it is returned by the TargetSslProxies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TargetSslProxiesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TargetSslProxiesRestInterceptor + + +class TargetSslProxiesRestTransport(TargetSslProxiesTransport): + """REST backend transport for TargetSslProxies. + + The TargetSslProxies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TargetSslProxiesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TargetSslProxiesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(TargetSslProxiesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteTargetSslProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteTargetSslProxyRequest): + The request object. A request message for + TargetSslProxies.Delete. See the method + description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteTargetSslProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # 
subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(TargetSslProxiesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetTargetSslProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetSslProxy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetTargetSslProxyRequest): + The request object. A request message for + TargetSslProxies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetSslProxy: + Represents a Target SSL Proxy + resource. A target SSL proxy is a + component of a SSL Proxy load balancer. + Global forwarding rules reference a + target SSL proxy, and the target proxy + then references an external backend + service. For more information, read + Using Target Proxies. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetTargetSslProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetSslProxy() + pb_resp = compute.TargetSslProxy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(TargetSslProxiesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertTargetSslProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertTargetSslProxyRequest): + The request object. A request message for + TargetSslProxies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies', + 'body': 'target_ssl_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertTargetSslProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(TargetSslProxiesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListTargetSslProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetSslProxyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListTargetSslProxiesRequest): + The request object. A request message for + TargetSslProxies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetSslProxyList: + Contains a list of TargetSslProxy + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListTargetSslProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetSslProxyList() + pb_resp = compute.TargetSslProxyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetBackendService(TargetSslProxiesRestStub): + def __hash__(self): + return hash("SetBackendService") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetBackendServiceTargetSslProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set backend service method over HTTP. + + Args: + request (~.compute.SetBackendServiceTargetSslProxyRequest): + The request object. A request message for + TargetSslProxies.SetBackendService. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setBackendService', + 'body': 'target_ssl_proxies_set_backend_service_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_backend_service(request, metadata) + pb_request = compute.SetBackendServiceTargetSslProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_backend_service(resp) + return resp + + class _SetCertificateMap(TargetSslProxiesRestStub): + def __hash__(self): + return hash("SetCertificateMap") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetCertificateMapTargetSslProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set certificate map method over HTTP. + + Args: + request (~.compute.SetCertificateMapTargetSslProxyRequest): + The request object. A request message for + TargetSslProxies.SetCertificateMap. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setCertificateMap', + 'body': 'target_ssl_proxies_set_certificate_map_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_certificate_map(request, metadata) + pb_request = compute.SetCertificateMapTargetSslProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_certificate_map(resp) + return resp + + class _SetProxyHeader(TargetSslProxiesRestStub): + def __hash__(self): + return hash("SetProxyHeader") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetProxyHeaderTargetSslProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set proxy header method over HTTP. + + Args: + request (~.compute.SetProxyHeaderTargetSslProxyRequest): + The request object. A request message for + TargetSslProxies.SetProxyHeader. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setProxyHeader', + 'body': 'target_ssl_proxies_set_proxy_header_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_proxy_header(request, metadata) + pb_request = compute.SetProxyHeaderTargetSslProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_proxy_header(resp) + return resp + + class _SetSslCertificates(TargetSslProxiesRestStub): + def __hash__(self): + return hash("SetSslCertificates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetSslCertificatesTargetSslProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set ssl certificates method over HTTP. + + Args: + request (~.compute.SetSslCertificatesTargetSslProxyRequest): + The request object. A request message for + TargetSslProxies.SetSslCertificates. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslCertificates', + 'body': 'target_ssl_proxies_set_ssl_certificates_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_ssl_certificates(request, metadata) + pb_request = compute.SetSslCertificatesTargetSslProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_ssl_certificates(resp) + return resp + + class _SetSslPolicy(TargetSslProxiesRestStub): + def __hash__(self): + return hash("SetSslPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetSslPolicyTargetSslProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set ssl policy method over HTTP. + + Args: + request (~.compute.SetSslPolicyTargetSslProxyRequest): + The request object. A request message for + TargetSslProxies.SetSslPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslPolicy', + 'body': 'ssl_policy_reference_resource', + }, + ] + request, metadata = self._interceptor.pre_set_ssl_policy(request, metadata) + pb_request = compute.SetSslPolicyTargetSslProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_ssl_policy(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetSslProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetTargetSslProxyRequest], + compute.TargetSslProxy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertTargetSslProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListTargetSslProxiesRequest], + compute.TargetSslProxyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_backend_service(self) -> Callable[ + [compute.SetBackendServiceTargetSslProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetBackendService(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_certificate_map(self) -> Callable[ + [compute.SetCertificateMapTargetSslProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetCertificateMap(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_proxy_header(self) -> Callable[ + [compute.SetProxyHeaderTargetSslProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetProxyHeader(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_ssl_certificates(self) -> Callable[ + [compute.SetSslCertificatesTargetSslProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSslCertificates(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_ssl_policy(self) -> Callable[ + [compute.SetSslPolicyTargetSslProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetSslPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TargetSslProxiesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/__init__.py new file mode 100644 index 000000000..5583fe047 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TargetTcpProxiesClient + +__all__ = ( + 'TargetTcpProxiesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/client.py new file mode 100644 index 000000000..b8fc037a0 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/client.py @@ -0,0 +1,1759 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.target_tcp_proxies import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import TargetTcpProxiesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import TargetTcpProxiesRestTransport + + +class TargetTcpProxiesClientMeta(type): + """Metaclass for the TargetTcpProxies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TargetTcpProxiesTransport]] + _transport_registry["rest"] = TargetTcpProxiesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TargetTcpProxiesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TargetTcpProxiesClient(metaclass=TargetTcpProxiesClientMeta): + """The TargetTcpProxies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetTcpProxiesClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetTcpProxiesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TargetTcpProxiesTransport: + """Returns the transport used by the client instance. + + Returns: + TargetTcpProxiesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TargetTcpProxiesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the target tcp proxies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TargetTcpProxiesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TargetTcpProxiesTransport): + # transport is a TargetTcpProxiesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListTargetTcpProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all TargetTcpProxy resources, + regional and global, available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetTcpProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListTargetTcpProxiesRequest, dict]): + The request object. A request message for + TargetTcpProxies.AggregatedList. See the + method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_tcp_proxies.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListTargetTcpProxiesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListTargetTcpProxiesRequest): + request = compute.AggregatedListTargetTcpProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified TargetTcpProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + to delete. + + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_tcp_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetTcpProxyRequest): + request = compute.DeleteTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified TargetTcpProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + to delete. + + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, target_tcp_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetTcpProxyRequest): + request = compute.DeleteTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetTcpProxy: + r"""Returns the specified TargetTcpProxy resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ target_tcp_proxy (str): + Name of the TargetTcpProxy resource + to return. + + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetTcpProxy: + Represents a Target TCP Proxy + resource. A target TCP proxy is a + component of a TCP Proxy load balancer. + Global forwarding rules reference target + TCP proxy, and the target proxy then + references an external backend service. + For more information, read TCP Proxy + Load Balancing overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_tcp_proxy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetTargetTcpProxyRequest): + request = compute.GetTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy_resource: Optional[compute.TargetTcpProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a TargetTcpProxy resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetTcpProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.Insert. See the method + description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy_resource (google.cloud.compute_v1.types.TargetTcpProxy): + The body resource for this request + This corresponds to the ``target_tcp_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_tcp_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetTcpProxyRequest): + request = compute.InsertTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_tcp_proxy_resource is not None: + request.target_tcp_proxy_resource = target_tcp_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy_resource: Optional[compute.TargetTcpProxy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a TargetTcpProxy resource in the specified + project using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetTcpProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy_resource (google.cloud.compute_v1.types.TargetTcpProxy): + The body resource for this request + This corresponds to the ``target_tcp_proxy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_tcp_proxy_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetTcpProxyRequest): + request = compute.InsertTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_tcp_proxy_resource is not None: + request.target_tcp_proxy_resource = target_tcp_proxy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListTargetTcpProxiesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of TargetTcpProxy resources + available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetTcpProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListTargetTcpProxiesRequest, dict]): + The request object. A request message for + TargetTcpProxies.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_tcp_proxies.pagers.ListPager: + Contains a list of TargetTcpProxy + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListTargetTcpProxiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListTargetTcpProxiesRequest): + request = compute.ListTargetTcpProxiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_backend_service_unary(self, + request: Optional[Union[compute.SetBackendServiceTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + target_tcp_proxies_set_backend_service_request_resource: Optional[compute.TargetTcpProxiesSetBackendServiceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the BackendService for TargetTcpProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_backend_service(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetBackendServiceTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.set_backend_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetBackendServiceTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.SetBackendService. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + whose BackendService resource is to be + set. 
+ + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxies_set_backend_service_request_resource (google.cloud.compute_v1.types.TargetTcpProxiesSetBackendServiceRequest): + The body resource for this request + This corresponds to the ``target_tcp_proxies_set_backend_service_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_tcp_proxy, target_tcp_proxies_set_backend_service_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetBackendServiceTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetBackendServiceTargetTcpProxyRequest): + request = compute.SetBackendServiceTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + if target_tcp_proxies_set_backend_service_request_resource is not None: + request.target_tcp_proxies_set_backend_service_request_resource = target_tcp_proxies_set_backend_service_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_backend_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_backend_service(self, + request: Optional[Union[compute.SetBackendServiceTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + target_tcp_proxies_set_backend_service_request_resource: Optional[compute.TargetTcpProxiesSetBackendServiceRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the BackendService for TargetTcpProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_backend_service(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetBackendServiceTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.set_backend_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetBackendServiceTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.SetBackendService. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + whose BackendService resource is to be + set. + + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxies_set_backend_service_request_resource (google.cloud.compute_v1.types.TargetTcpProxiesSetBackendServiceRequest): + The body resource for this request + This corresponds to the ``target_tcp_proxies_set_backend_service_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_tcp_proxy, target_tcp_proxies_set_backend_service_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetBackendServiceTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetBackendServiceTargetTcpProxyRequest): + request = compute.SetBackendServiceTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + if target_tcp_proxies_set_backend_service_request_resource is not None: + request.target_tcp_proxies_set_backend_service_request_resource = target_tcp_proxies_set_backend_service_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_backend_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_proxy_header_unary(self, + request: Optional[Union[compute.SetProxyHeaderTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + target_tcp_proxies_set_proxy_header_request_resource: Optional[compute.TargetTcpProxiesSetProxyHeaderRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Changes the ProxyHeaderType for TargetTcpProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_proxy_header(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetProxyHeaderTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.set_proxy_header(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetProxyHeaderTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.SetProxyHeader. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + whose ProxyHeader is to be set. + + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxies_set_proxy_header_request_resource (google.cloud.compute_v1.types.TargetTcpProxiesSetProxyHeaderRequest): + The body resource for this request + This corresponds to the ``target_tcp_proxies_set_proxy_header_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_tcp_proxy, target_tcp_proxies_set_proxy_header_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetProxyHeaderTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetProxyHeaderTargetTcpProxyRequest): + request = compute.SetProxyHeaderTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + if target_tcp_proxies_set_proxy_header_request_resource is not None: + request.target_tcp_proxies_set_proxy_header_request_resource = target_tcp_proxies_set_proxy_header_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_proxy_header] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_proxy_header(self, + request: Optional[Union[compute.SetProxyHeaderTargetTcpProxyRequest, dict]] = None, + *, + project: Optional[str] = None, + target_tcp_proxy: Optional[str] = None, + target_tcp_proxies_set_proxy_header_request_resource: Optional[compute.TargetTcpProxiesSetProxyHeaderRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Changes the ProxyHeaderType for TargetTcpProxy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_proxy_header(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetProxyHeaderTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.set_proxy_header(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetProxyHeaderTargetTcpProxyRequest, dict]): + The request object. A request message for + TargetTcpProxies.SetProxyHeader. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource + whose ProxyHeader is to be set. 
+ + This corresponds to the ``target_tcp_proxy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_tcp_proxies_set_proxy_header_request_resource (google.cloud.compute_v1.types.TargetTcpProxiesSetProxyHeaderRequest): + The body resource for this request + This corresponds to the ``target_tcp_proxies_set_proxy_header_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target_tcp_proxy, target_tcp_proxies_set_proxy_header_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetProxyHeaderTargetTcpProxyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetProxyHeaderTargetTcpProxyRequest): + request = compute.SetProxyHeaderTargetTcpProxyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if target_tcp_proxy is not None: + request.target_tcp_proxy = target_tcp_proxy + if target_tcp_proxies_set_proxy_header_request_resource is not None: + request.target_tcp_proxies_set_proxy_header_request_resource = target_tcp_proxies_set_proxy_header_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_proxy_header] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("target_tcp_proxy", request.target_tcp_proxy), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "TargetTcpProxiesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TargetTcpProxiesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/pagers.py new file mode 100644 index 000000000..7802d09c3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetTcpProxyAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetTcpProxyAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetTcpProxyAggregatedList], + request: compute.AggregatedListTargetTcpProxiesRequest, + response: compute.TargetTcpProxyAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListTargetTcpProxiesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetTcpProxyAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListTargetTcpProxiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetTcpProxyAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.TargetTcpProxiesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.TargetTcpProxiesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetTcpProxyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetTcpProxyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetTcpProxyList], + request: compute.ListTargetTcpProxiesRequest, + response: compute.TargetTcpProxyList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListTargetTcpProxiesRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetTcpProxyList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListTargetTcpProxiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetTcpProxyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetTcpProxy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/__init__.py new file mode 100644 index 000000000..5cfbb2572 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TargetTcpProxiesTransport +from .rest import TargetTcpProxiesRestTransport +from .rest import TargetTcpProxiesRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TargetTcpProxiesTransport]] +_transport_registry['rest'] = TargetTcpProxiesRestTransport + +__all__ = ( + 'TargetTcpProxiesTransport', + 'TargetTcpProxiesRestTransport', + 'TargetTcpProxiesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/base.py new file mode 100644 index 000000000..241b56996 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/base.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class TargetTcpProxiesTransport(abc.ABC): + """Abstract transport class for TargetTcpProxies.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_backend_service: gapic_v1.method.wrap_method( + self.set_backend_service, + default_timeout=None, + client_info=client_info, + ), + self.set_proxy_header: gapic_v1.method.wrap_method( + self.set_proxy_header, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetTcpProxiesRequest], + Union[ + compute.TargetTcpProxyAggregatedList, + Awaitable[compute.TargetTcpProxyAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetTcpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetTargetTcpProxyRequest], + Union[ + compute.TargetTcpProxy, + Awaitable[compute.TargetTcpProxy] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertTargetTcpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListTargetTcpProxiesRequest], + Union[ + compute.TargetTcpProxyList, + Awaitable[compute.TargetTcpProxyList] + ]]: + raise NotImplementedError() + + @property + def set_backend_service(self) -> Callable[ + [compute.SetBackendServiceTargetTcpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def set_proxy_header(self) -> Callable[ + [compute.SetProxyHeaderTargetTcpProxyRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'TargetTcpProxiesTransport', +) diff --git 
a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py new file mode 100644 index 000000000..4d860a542 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py @@ -0,0 +1,1052 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base 
import TargetTcpProxiesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TargetTcpProxiesRestInterceptor: + """Interceptor for TargetTcpProxies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TargetTcpProxiesRestTransport. + + .. code-block:: python + class MyCustomTargetTcpProxiesInterceptor(TargetTcpProxiesRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_backend_service(self, 
request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_backend_service(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_proxy_header(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_proxy_header(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TargetTcpProxiesRestTransport(interceptor=MyCustomTargetTcpProxiesInterceptor()) + client = TargetTcpProxiesClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListTargetTcpProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListTargetTcpProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetTcpProxies server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.TargetTcpProxyAggregatedList) -> compute.TargetTcpProxyAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the TargetTcpProxies server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteTargetTcpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteTargetTcpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetTcpProxies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the TargetTcpProxies server but before + it is returned to user code. 
+ """ + return response + def pre_get(self, request: compute.GetTargetTcpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetTargetTcpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetTcpProxies server. + """ + return request, metadata + + def post_get(self, response: compute.TargetTcpProxy) -> compute.TargetTcpProxy: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the TargetTcpProxies server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertTargetTcpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertTargetTcpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetTcpProxies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the TargetTcpProxies server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListTargetTcpProxiesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListTargetTcpProxiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetTcpProxies server. + """ + return request, metadata + + def post_list(self, response: compute.TargetTcpProxyList) -> compute.TargetTcpProxyList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the TargetTcpProxies server but before + it is returned to user code. 
+ """ + return response + def pre_set_backend_service(self, request: compute.SetBackendServiceTargetTcpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetBackendServiceTargetTcpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_backend_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetTcpProxies server. + """ + return request, metadata + + def post_set_backend_service(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_backend_service + + Override in a subclass to manipulate the response + after it is returned by the TargetTcpProxies server but before + it is returned to user code. + """ + return response + def pre_set_proxy_header(self, request: compute.SetProxyHeaderTargetTcpProxyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetProxyHeaderTargetTcpProxyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_proxy_header + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetTcpProxies server. + """ + return request, metadata + + def post_set_proxy_header(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_proxy_header + + Override in a subclass to manipulate the response + after it is returned by the TargetTcpProxies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TargetTcpProxiesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TargetTcpProxiesRestInterceptor + + +class TargetTcpProxiesRestTransport(TargetTcpProxiesTransport): + """REST backend transport for TargetTcpProxies. + + The TargetTcpProxies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TargetTcpProxiesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TargetTcpProxiesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(TargetTcpProxiesRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListTargetTcpProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: 
Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetTcpProxyAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListTargetTcpProxiesRequest): + The request object. A request message for + TargetTcpProxies.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetTcpProxyAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/targetTcpProxies', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListTargetTcpProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetTcpProxyAggregatedList() + pb_resp = compute.TargetTcpProxyAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(TargetTcpProxiesRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteTargetTcpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteTargetTcpProxyRequest): + The request object. A request message for + TargetTcpProxies.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteTargetTcpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(TargetTcpProxiesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetTargetTcpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetTcpProxy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetTargetTcpProxyRequest): + The request object. A request message for + TargetTcpProxies.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetTcpProxy: + Represents a Target TCP Proxy + resource. A target TCP proxy is a + component of a TCP Proxy load balancer. + Global forwarding rules reference target + TCP proxy, and the target proxy then + references an external backend service. + For more information, read TCP Proxy + Load Balancing overview. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetTargetTcpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetTcpProxy() + pb_resp = compute.TargetTcpProxy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(TargetTcpProxiesRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertTargetTcpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertTargetTcpProxyRequest): + The request object. A request message for + TargetTcpProxies.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetTcpProxies', + 'body': 'target_tcp_proxy_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertTargetTcpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(TargetTcpProxiesRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListTargetTcpProxiesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetTcpProxyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListTargetTcpProxiesRequest): + The request object. A request message for + TargetTcpProxies.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetTcpProxyList: + Contains a list of TargetTcpProxy + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/targetTcpProxies', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListTargetTcpProxiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetTcpProxyList() + pb_resp = compute.TargetTcpProxyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetBackendService(TargetTcpProxiesRestStub): + def __hash__(self): + return hash("SetBackendService") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetBackendServiceTargetTcpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set backend service method over HTTP. + + Args: + request (~.compute.SetBackendServiceTargetTcpProxyRequest): + The request object. A request message for + TargetTcpProxies.SetBackendService. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource.
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setBackendService', + 'body': 'target_tcp_proxies_set_backend_service_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_backend_service(request, metadata) + pb_request = compute.SetBackendServiceTargetTcpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_backend_service(resp) + return resp + + class _SetProxyHeader(TargetTcpProxiesRestStub): + def __hash__(self): + return hash("SetProxyHeader") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetProxyHeaderTargetTcpProxyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set proxy header method over HTTP. + + Args: + request (~.compute.SetProxyHeaderTargetTcpProxyRequest): + The request object. A request message for + TargetTcpProxies.SetProxyHeader. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource.
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setProxyHeader', + 'body': 'target_tcp_proxies_set_proxy_header_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_proxy_header(request, metadata) + pb_request = compute.SetProxyHeaderTargetTcpProxyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_proxy_header(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetTcpProxiesRequest], + compute.TargetTcpProxyAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetTcpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetTargetTcpProxyRequest], + compute.TargetTcpProxy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertTargetTcpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListTargetTcpProxiesRequest], + compute.TargetTcpProxyList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_backend_service(self) -> Callable[ + [compute.SetBackendServiceTargetTcpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetBackendService(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_proxy_header(self) -> Callable[ + [compute.SetProxyHeaderTargetTcpProxyRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetProxyHeader(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TargetTcpProxiesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/__init__.py new file mode 100644 index 000000000..a0eae7cc5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import TargetVpnGatewaysClient + +__all__ = ( + 'TargetVpnGatewaysClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/client.py new file mode 100644 index 000000000..ab83fbad9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/client.py @@ -0,0 +1,1572 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.target_vpn_gateways import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import TargetVpnGatewaysTransport, DEFAULT_CLIENT_INFO +from .transports.rest import TargetVpnGatewaysRestTransport + + +class TargetVpnGatewaysClientMeta(type): + """Metaclass for the TargetVpnGateways client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TargetVpnGatewaysTransport]] + _transport_registry["rest"] = TargetVpnGatewaysRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TargetVpnGatewaysTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TargetVpnGatewaysClient(metaclass=TargetVpnGatewaysClientMeta): + """The TargetVpnGateways API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetVpnGatewaysClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TargetVpnGatewaysClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TargetVpnGatewaysTransport: + """Returns the transport used by the client instance. + + Returns: + TargetVpnGatewaysTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TargetVpnGatewaysTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the target vpn gateways client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TargetVpnGatewaysTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TargetVpnGatewaysTransport): + # transport is a TargetVpnGatewaysTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListTargetVpnGatewaysRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of target VPN gateways. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetVpnGatewaysRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListTargetVpnGatewaysRequest, dict]): + The request object. A request message for + TargetVpnGateways.AggregatedList. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_vpn_gateways.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListTargetVpnGatewaysRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListTargetVpnGatewaysRequest): + request = compute.AggregatedListTargetVpnGatewaysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteTargetVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified target VPN gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetVpnGatewayRequest( + project="project_value", + region="region_value", + target_vpn_gateway="target_vpn_gateway_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetVpnGatewayRequest, dict]): + The request object. A request message for + TargetVpnGateways.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_vpn_gateway (str): + Name of the target VPN gateway to + delete. + + This corresponds to the ``target_vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, target_vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetVpnGatewayRequest): + request = compute.DeleteTargetVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_vpn_gateway is not None: + request.target_vpn_gateway = target_vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_vpn_gateway", request.target_vpn_gateway), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteTargetVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified target VPN gateway. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetVpnGatewayRequest( + project="project_value", + region="region_value", + target_vpn_gateway="target_vpn_gateway_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteTargetVpnGatewayRequest, dict]): + The request object. A request message for + TargetVpnGateways.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_vpn_gateway (str): + Name of the target VPN gateway to + delete. + + This corresponds to the ``target_vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteTargetVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteTargetVpnGatewayRequest): + request = compute.DeleteTargetVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_vpn_gateway is not None: + request.target_vpn_gateway = target_vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_vpn_gateway", request.target_vpn_gateway), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetTargetVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TargetVpnGateway: + r"""Returns the specified target VPN gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetVpnGatewayRequest( + project="project_value", + region="region_value", + target_vpn_gateway="target_vpn_gateway_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetTargetVpnGatewayRequest, dict]): + The request object. A request message for + TargetVpnGateways.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_vpn_gateway (str): + Name of the target VPN gateway to + return. + + This corresponds to the ``target_vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TargetVpnGateway: + Represents a Target VPN Gateway + resource. The target VPN gateway + resource represents a Classic Cloud VPN + gateway. For more information, read + the Cloud VPN Overview. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetTargetVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetTargetVpnGatewayRequest): + request = compute.GetTargetVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_vpn_gateway is not None: + request.target_vpn_gateway = target_vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("target_vpn_gateway", request.target_vpn_gateway), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary(self, + request: Optional[Union[compute.InsertTargetVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_vpn_gateway_resource: Optional[compute.TargetVpnGateway] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a target VPN gateway in the specified project + and region using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetVpnGatewayRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetVpnGatewayRequest, dict]): + The request object. A request message for + TargetVpnGateways.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ target_vpn_gateway_resource (google.cloud.compute_v1.types.TargetVpnGateway): + The body resource for this request + This corresponds to the ``target_vpn_gateway_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_vpn_gateway_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetVpnGatewayRequest): + request = compute.InsertTargetVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if target_vpn_gateway_resource is not None: + request.target_vpn_gateway_resource = target_vpn_gateway_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertTargetVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + target_vpn_gateway_resource: Optional[compute.TargetVpnGateway] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a target VPN gateway in the specified project + and region using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetVpnGatewayRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertTargetVpnGatewayRequest, dict]): + The request object. A request message for + TargetVpnGateways.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_vpn_gateway_resource (google.cloud.compute_v1.types.TargetVpnGateway): + The body resource for this request + This corresponds to the ``target_vpn_gateway_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, target_vpn_gateway_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertTargetVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertTargetVpnGatewayRequest): + request = compute.InsertTargetVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if target_vpn_gateway_resource is not None: + request.target_vpn_gateway_resource = target_vpn_gateway_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListTargetVpnGatewaysRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of target VPN gateways available to + the specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetVpnGatewaysRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListTargetVpnGatewaysRequest, dict]): + The request object. A request message for + TargetVpnGateways.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.target_vpn_gateways.pagers.ListPager: + Contains a list of TargetVpnGateway + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListTargetVpnGatewaysRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListTargetVpnGatewaysRequest): + request = compute.ListTargetVpnGatewaysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsTargetVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on a TargetVpnGateway. To learn more + about labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsTargetVpnGatewayRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsTargetVpnGatewayRequest, dict]): + The request object. A request message for + TargetVpnGateways.SetLabels. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsTargetVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, compute.SetLabelsTargetVpnGatewayRequest): + request = compute.SetLabelsTargetVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsTargetVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on a TargetVpnGateway. To learn more + about labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsTargetVpnGatewayRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsTargetVpnGatewayRequest, dict]): + The request object. A request message for + TargetVpnGateways.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing an extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsTargetVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsTargetVpnGatewayRequest): + request = compute.SetLabelsTargetVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "TargetVpnGatewaysClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TargetVpnGatewaysClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/pagers.py new file mode 100644 index 000000000..5d43bad78 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetVpnGatewayAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetVpnGatewayAggregatedList` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetVpnGatewayAggregatedList], + request: compute.AggregatedListTargetVpnGatewaysRequest, + response: compute.TargetVpnGatewayAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListTargetVpnGatewaysRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetVpnGatewayAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListTargetVpnGatewaysRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetVpnGatewayAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.TargetVpnGatewaysScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.TargetVpnGatewaysScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.TargetVpnGatewayList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.TargetVpnGatewayList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.TargetVpnGatewayList], + request: compute.ListTargetVpnGatewaysRequest, + response: compute.TargetVpnGatewayList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListTargetVpnGatewaysRequest): + The initial request object. + response (google.cloud.compute_v1.types.TargetVpnGatewayList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListTargetVpnGatewaysRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.TargetVpnGatewayList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.TargetVpnGateway]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/__init__.py new file mode 100644 index 000000000..a49015fd4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TargetVpnGatewaysTransport +from .rest import TargetVpnGatewaysRestTransport +from .rest import TargetVpnGatewaysRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[TargetVpnGatewaysTransport]] +_transport_registry['rest'] = TargetVpnGatewaysRestTransport + +__all__ = ( + 'TargetVpnGatewaysTransport', + 'TargetVpnGatewaysRestTransport', + 'TargetVpnGatewaysRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/base.py new file mode 100644 index 000000000..b0772f4ef --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class TargetVpnGatewaysTransport(abc.ABC): + """Abstract transport class for TargetVpnGateways.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetVpnGatewaysRequest], + Union[ + compute.TargetVpnGatewayAggregatedList, + Awaitable[compute.TargetVpnGatewayAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetVpnGatewayRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetTargetVpnGatewayRequest], + Union[ + compute.TargetVpnGateway, + Awaitable[compute.TargetVpnGateway] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertTargetVpnGatewayRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListTargetVpnGatewaysRequest], + Union[ + compute.TargetVpnGatewayList, + Awaitable[compute.TargetVpnGatewayList] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsTargetVpnGatewayRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'TargetVpnGatewaysTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py new file mode 
100644 index 000000000..f782227e9 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py @@ -0,0 +1,915 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import TargetVpnGatewaysTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + 
grpc_version=None, + rest_version=requests_version, +) + + +class TargetVpnGatewaysRestInterceptor: + """Interceptor for TargetVpnGateways. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TargetVpnGatewaysRestTransport. + + .. code-block:: python + class MyCustomTargetVpnGatewaysInterceptor(TargetVpnGatewaysRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + 
transport = TargetVpnGatewaysRestTransport(interceptor=MyCustomTargetVpnGatewaysInterceptor()) + client = TargetVpnGatewaysClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListTargetVpnGatewaysRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListTargetVpnGatewaysRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetVpnGateways server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.TargetVpnGatewayAggregatedList) -> compute.TargetVpnGatewayAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the TargetVpnGateways server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteTargetVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteTargetVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetVpnGateways server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the TargetVpnGateways server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetTargetVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetTargetVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetVpnGateways server. 
+ """ + return request, metadata + + def post_get(self, response: compute.TargetVpnGateway) -> compute.TargetVpnGateway: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the TargetVpnGateways server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertTargetVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertTargetVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetVpnGateways server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the TargetVpnGateways server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListTargetVpnGatewaysRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListTargetVpnGatewaysRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetVpnGateways server. + """ + return request, metadata + + def post_list(self, response: compute.TargetVpnGatewayList) -> compute.TargetVpnGatewayList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the TargetVpnGateways server but before + it is returned to user code. 
+ """ + return response + def pre_set_labels(self, request: compute.SetLabelsTargetVpnGatewayRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsTargetVpnGatewayRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the TargetVpnGateways server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the TargetVpnGateways server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TargetVpnGatewaysRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TargetVpnGatewaysRestInterceptor + + +class TargetVpnGatewaysRestTransport(TargetVpnGatewaysTransport): + """REST backend transport for TargetVpnGateways. + + The TargetVpnGateways API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TargetVpnGatewaysRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TargetVpnGatewaysRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AggregatedList(TargetVpnGatewaysRestStub):
+        def __hash__(self):
+            return hash("AggregatedList")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.AggregatedListTargetVpnGatewaysRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.TargetVpnGatewayAggregatedList:
+            r"""Call the aggregated list method over HTTP.
+
+            Args:
+                request (~.compute.AggregatedListTargetVpnGatewaysRequest):
+                    The request object. 
A request message for + TargetVpnGateways.AggregatedList. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetVpnGatewayAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/targetVpnGateways', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListTargetVpnGatewaysRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = compute.TargetVpnGatewayAggregatedList()
+            pb_resp = compute.TargetVpnGatewayAggregatedList.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_aggregated_list(resp)
+            return resp
+
+    class _Delete(TargetVpnGatewaysRestStub):
+        def __hash__(self):
+            return hash("Delete")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: compute.DeleteTargetVpnGatewayRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> compute.Operation:
+            r"""Call the delete method over HTTP.
+
+            Args:
+                request (~.compute.DeleteTargetVpnGatewayRequest):
+                    The request object. A request message for
+                    TargetVpnGateways.Delete. See the method
+                    description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global </compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional </compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource.
For more + information, read Global, Regional, and Zonal Resources. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteTargetVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(TargetVpnGatewaysRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetTargetVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetVpnGateway: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetTargetVpnGatewayRequest): + The request object. A request message for + TargetVpnGateways.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetVpnGateway: + Represents a Target VPN Gateway + resource. The target VPN gateway + resource represents a Classic Cloud VPN + gateway. For more information, read the + the Cloud VPN Overview. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetTargetVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetVpnGateway() + pb_resp = compute.TargetVpnGateway.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(TargetVpnGatewaysRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertTargetVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertTargetVpnGatewayRequest): + The request object. A request message for + TargetVpnGateways.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetVpnGateways', + 'body': 'target_vpn_gateway_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertTargetVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(TargetVpnGatewaysRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListTargetVpnGatewaysRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TargetVpnGatewayList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListTargetVpnGatewaysRequest): + The request object. A request message for + TargetVpnGateways.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.TargetVpnGatewayList: + Contains a list of TargetVpnGateway + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetVpnGateways', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListTargetVpnGatewaysRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TargetVpnGatewayList() + pb_resp = compute.TargetVpnGatewayList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetLabels(TargetVpnGatewaysRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsTargetVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsTargetVpnGatewayRequest): + The request object. A request message for + TargetVpnGateways.SetLabels. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{resource}/setLabels', + 'body': 'region_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsTargetVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListTargetVpnGatewaysRequest], + compute.TargetVpnGatewayAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteTargetVpnGatewayRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetTargetVpnGatewayRequest], + compute.TargetVpnGateway]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertTargetVpnGatewayRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListTargetVpnGatewaysRequest], + compute.TargetVpnGatewayList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsTargetVpnGatewayRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TargetVpnGatewaysRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/__init__.py new file mode 100644 index 000000000..ad8e4606f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import UrlMapsClient + +__all__ = ( + 'UrlMapsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/client.py new file mode 100644 index 000000000..adeb3cbf3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/client.py @@ -0,0 +1,2154 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.url_maps import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import UrlMapsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import UrlMapsRestTransport + + +class UrlMapsClientMeta(type): + """Metaclass for the UrlMaps client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[UrlMapsTransport]] + _transport_registry["rest"] = UrlMapsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[UrlMapsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class UrlMapsClient(metaclass=UrlMapsClientMeta): + """The UrlMaps API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UrlMapsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + UrlMapsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> UrlMapsTransport: + """Returns the transport used by the client instance. + + Returns: + UrlMapsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, UrlMapsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the url maps client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, UrlMapsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, UrlMapsTransport): + # transport is a UrlMapsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListUrlMapsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all UrlMap resources, regional + and global, available to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListUrlMapsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListUrlMapsRequest, dict]): + The request object. A request message for + UrlMaps.AggregatedList. See the method + description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.url_maps.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListUrlMapsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListUrlMapsRequest): + request = compute.AggregatedListUrlMapsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified UrlMap resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Delete. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to + delete. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, url_map]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteUrlMapRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteUrlMapRequest): + request = compute.DeleteUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified UrlMap resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Delete. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to + delete. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, url_map]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteUrlMapRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteUrlMapRequest): + request = compute.DeleteUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get(self, + request: Optional[Union[compute.GetUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.UrlMap: + r"""Returns the specified UrlMap resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.GetUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to + return. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.UrlMap: + Represents a URL Map resource. Compute Engine has two + URL Map resources: \* + [Global](/compute/docs/reference/rest/v1/urlMaps) \* + [Regional](/compute/docs/reference/rest/v1/regionUrlMaps) + A URL map resource is a component of certain types of + cloud load balancers and Traffic Director: \* urlMaps + are used by external HTTP(S) load balancers and Traffic + Director. \* regionUrlMaps are used by internal HTTP(S) + load balancers. For a list of supported URL map features + by the load balancer type, see the Load balancing + features: Routing and traffic management table. For a + list of supported URL map features for Traffic Director, + see the Traffic Director features: Routing and traffic + management table. This resource defines mappings from + hostnames and URL paths to either a backend service or a + backend bucket. To use the global urlMaps resource, the + backend service must have a loadBalancingScheme of + either EXTERNAL or INTERNAL_SELF_MANAGED. To use the + regionUrlMaps resource, the backend service must have a + loadBalancingScheme of INTERNAL_MANAGED. For more + information, read URL Map Concepts. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, url_map]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetUrlMapRequest): + request = compute.GetUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a UrlMap resource in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InsertUrlMapRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + This corresponds to the ``url_map_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertUrlMapRequest): + request = compute.InsertUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a UrlMap resource in the specified project + using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InsertUrlMapRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + This corresponds to the ``url_map_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertUrlMapRequest): + request = compute.InsertUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def invalidate_cache_unary(self, + request: Optional[Union[compute.InvalidateCacheUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + cache_invalidation_rule_resource: Optional[compute.CacheInvalidationRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Initiates a cache invalidation operation, invalidating the + specified path, scoped to the specified UrlMap. For more + information, see `Invalidating cached + content <https://cloud.google.com/cdn/docs/invalidating-cached-content>`__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_invalidate_cache(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InvalidateCacheUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.invalidate_cache(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InvalidateCacheUrlMapRequest, dict]): + The request object. 
A request message for + UrlMaps.InvalidateCache. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap scoping this + request. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cache_invalidation_rule_resource (google.cloud.compute_v1.types.CacheInvalidationRule): + The body resource for this request + This corresponds to the ``cache_invalidation_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, url_map, cache_invalidation_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InvalidateCacheUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InvalidateCacheUrlMapRequest): + request = compute.InvalidateCacheUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + if cache_invalidation_rule_resource is not None: + request.cache_invalidation_rule_resource = cache_invalidation_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.invalidate_cache] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def invalidate_cache(self, + request: Optional[Union[compute.InvalidateCacheUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + cache_invalidation_rule_resource: Optional[compute.CacheInvalidationRule] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Initiates a cache invalidation operation, invalidating the + specified path, scoped to the specified UrlMap. For more + information, see `Invalidating cached + content <https://cloud.google.com/cdn/docs/invalidating-cached-content>`__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_invalidate_cache(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InvalidateCacheUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.invalidate_cache(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InvalidateCacheUrlMapRequest, dict]): + The request object. A request message for + UrlMaps.InvalidateCache. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap scoping this + request. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cache_invalidation_rule_resource (google.cloud.compute_v1.types.CacheInvalidationRule): + The body resource for this request + This corresponds to the ``cache_invalidation_rule_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, url_map, cache_invalidation_rule_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InvalidateCacheUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InvalidateCacheUrlMapRequest): + request = compute.InvalidateCacheUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + if cache_invalidation_rule_resource is not None: + request.cache_invalidation_rule_resource = cache_invalidation_rule_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.invalidate_cache] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListUrlMapsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of UrlMap resources available to + the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.ListUrlMapsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListUrlMapsRequest, dict]): + The request object. A request message for UrlMaps.List. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.url_maps.pagers.ListPager: + Contains a list of UrlMap resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListUrlMapsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListUrlMapsRequest): + request = compute.ListUrlMapsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary(self, + request: Optional[Union[compute.PatchUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Patches the specified UrlMap resource with the data + included in the request. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.PatchUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Patch. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map (str):
+ Name of the UrlMap resource to patch.
+ This corresponds to the ``url_map`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map_resource (google.cloud.compute_v1.types.UrlMap):
+ The body resource for this request
+ This corresponds to the ``url_map_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, url_map, url_map_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.PatchUrlMapRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.PatchUrlMapRequest):
+ request = compute.PatchUrlMapRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch(self, + request: Optional[Union[compute.PatchUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patches the specified UrlMap resource with the data + included in the request. This method supports PATCH + semantics and uses the JSON merge patch format and + processing rules. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.PatchUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Patch. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to patch. + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + This corresponds to the ``url_map_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, url_map, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.PatchUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.PatchUrlMapRequest): + request = compute.PatchUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def update_unary(self, + request: Optional[Union[compute.UpdateUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified UrlMap resource with the data + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Update. + See the method description for details. 
+ project (str):
+ Project ID for this request.
+ This corresponds to the ``project`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map (str):
+ Name of the UrlMap resource to
+ update.
+
+ This corresponds to the ``url_map`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ url_map_resource (google.cloud.compute_v1.types.UrlMap):
+ The body resource for this request
+ This corresponds to the ``url_map_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, url_map, url_map_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.UpdateUrlMapRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.UpdateUrlMapRequest):
+ request = compute.UpdateUrlMapRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update(self, + request: Optional[Union[compute.UpdateUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + url_map_resource: Optional[compute.UrlMap] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified UrlMap resource with the data + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateUrlMapRequest, dict]): + The request object. A request message for UrlMaps.Update. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to + update. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + This corresponds to the ``url_map_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, url_map, url_map_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.UpdateUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.UpdateUrlMapRequest): + request = compute.UpdateUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + if url_map_resource is not None: + request.url_map_resource = url_map_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def validate(self, + request: Optional[Union[compute.ValidateUrlMapRequest, dict]] = None, + *, + project: Optional[str] = None, + url_map: Optional[str] = None, + url_maps_validate_request_resource: Optional[compute.UrlMapsValidateRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.UrlMapsValidateResponse: + r"""Runs static validation for the UrlMap. In particular, + the tests of the provided UrlMap will be run. Calling + this method does NOT create the UrlMap. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_validate(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.ValidateUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.validate(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ValidateUrlMapRequest, dict]): + The request object. A request message for + UrlMaps.Validate. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_map (str): + Name of the UrlMap resource to be + validated as. + + This corresponds to the ``url_map`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + url_maps_validate_request_resource (google.cloud.compute_v1.types.UrlMapsValidateRequest): + The body resource for this request + This corresponds to the ``url_maps_validate_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.UrlMapsValidateResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, url_map, url_maps_validate_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ValidateUrlMapRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ValidateUrlMapRequest): + request = compute.ValidateUrlMapRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if url_map is not None: + request.url_map = url_map + if url_maps_validate_request_resource is not None: + request.url_maps_validate_request_resource = url_maps_validate_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("url_map", request.url_map), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "UrlMapsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "UrlMapsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/pagers.py new file mode 100644 index 000000000..a33789643 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.UrlMapsAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.UrlMapsAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.UrlMapsAggregatedList], + request: compute.AggregatedListUrlMapsRequest, + response: compute.UrlMapsAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListUrlMapsRequest): + The initial request object. + response (google.cloud.compute_v1.types.UrlMapsAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListUrlMapsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.UrlMapsAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.UrlMapsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.UrlMapsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.UrlMapList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.compute_v1.types.UrlMapList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.UrlMapList], + request: compute.ListUrlMapsRequest, + response: compute.UrlMapList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListUrlMapsRequest): + The initial request object. + response (google.cloud.compute_v1.types.UrlMapList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListUrlMapsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.UrlMapList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.UrlMap]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/__init__.py new file mode 100644 index 000000000..f91406467 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import UrlMapsTransport +from .rest import UrlMapsRestTransport +from .rest import UrlMapsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[UrlMapsTransport]] +_transport_registry['rest'] = UrlMapsRestTransport + +__all__ = ( + 'UrlMapsTransport', + 'UrlMapsRestTransport', + 'UrlMapsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/base.py new file mode 100644 index 000000000..022d7d790 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import global_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class UrlMapsTransport(abc.ABC): + """Abstract transport class for UrlMaps.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.invalidate_cache: gapic_v1.method.wrap_method( + self.invalidate_cache, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + self.validate: gapic_v1.method.wrap_method( + self.validate, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListUrlMapsRequest], + Union[ + compute.UrlMapsAggregatedList, + Awaitable[compute.UrlMapsAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetUrlMapRequest], + Union[ + compute.UrlMap, + Awaitable[compute.UrlMap] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def invalidate_cache(self) -> Callable[ + [compute.InvalidateCacheUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListUrlMapsRequest], + Union[ + compute.UrlMapList, + Awaitable[compute.UrlMapList] + ]]: + raise NotImplementedError() + + @property + def patch(self) -> Callable[ + [compute.PatchUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def update(self) -> Callable[ + [compute.UpdateUrlMapRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def validate(self) -> Callable[ + [compute.ValidateUrlMapRequest], + Union[ + compute.UrlMapsValidateResponse, + Awaitable[compute.UrlMapsValidateResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _global_operations_client(self) -> global_operations.GlobalOperationsClient: + ex_op_service = self._extended_operations_services.get("global_operations") + if not ex_op_service: + ex_op_service = 
global_operations.GlobalOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["global_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'UrlMapsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/rest.py new file mode 100644 index 000000000..9e2e215f2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/url_maps/transports/rest.py @@ -0,0 +1,1310 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import UrlMapsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class UrlMapsRestInterceptor: + """Interceptor for UrlMaps. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the UrlMapsRestTransport. + + .. 
code-block:: python + class MyCustomUrlMapsInterceptor(UrlMapsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_invalidate_cache(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_invalidate_cache(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_validate(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_validate(self, response): + logging.log(f"Received response: {response}") + return response + + transport = UrlMapsRestTransport(interceptor=MyCustomUrlMapsInterceptor()) + client = UrlMapsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListUrlMapsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListUrlMapsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.UrlMapsAggregatedList) -> compute.UrlMapsAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. 
+ """ + return request, metadata + + def post_get(self, response: compute.UrlMap) -> compute.UrlMap: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + def pre_insert(self, request: compute.InsertUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + def pre_invalidate_cache(self, request: compute.InvalidateCacheUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InvalidateCacheUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for invalidate_cache + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. + """ + return request, metadata + + def post_invalidate_cache(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for invalidate_cache + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListUrlMapsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListUrlMapsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. 
+ """ + return request, metadata + + def post_list(self, response: compute.UrlMapList) -> compute.UrlMapList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + def pre_patch(self, request: compute.PatchUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.PatchUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + def pre_update(self, request: compute.UpdateUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.UpdateUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + def pre_validate(self, request: compute.ValidateUrlMapRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ValidateUrlMapRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for validate + + Override in a subclass to manipulate the request or metadata + before they are sent to the UrlMaps server. 
+ """ + return request, metadata + + def post_validate(self, response: compute.UrlMapsValidateResponse) -> compute.UrlMapsValidateResponse: + """Post-rpc interceptor for validate + + Override in a subclass to manipulate the response + after it is returned by the UrlMaps server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class UrlMapsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: UrlMapsRestInterceptor + + +class UrlMapsRestTransport(UrlMapsTransport): + """REST backend transport for UrlMaps. + + The UrlMaps API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[UrlMapsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or UrlMapsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(UrlMapsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListUrlMapsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.UrlMapsAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListUrlMapsRequest): + The request object. A request message for + UrlMaps.AggregatedList. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.UrlMapsAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/urlMaps', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListUrlMapsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.UrlMapsAggregatedList() + pb_resp = compute.UrlMapsAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(UrlMapsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteUrlMapRequest): + The request object. A request message for UrlMaps.Delete. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/global/urlMaps/{url_map}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(UrlMapsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.UrlMap: + r"""Call the get method over HTTP. 
+ + Args: + request (~.compute.GetUrlMapRequest): + The request object. A request message for UrlMaps.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.UrlMap: + Represents a URL Map resource. Compute Engine has two + URL Map resources: \* + `Global `__ \* + `Regional `__ + A URL map resource is a component of certain types of + cloud load balancers and Traffic Director: \* urlMaps + are used by external HTTP(S) load balancers and Traffic + Director. \* regionUrlMaps are used by internal HTTP(S) + load balancers. For a list of supported URL map features + by the load balancer type, see the Load balancing + features: Routing and traffic management table. For a + list of supported URL map features for Traffic Director, + see the Traffic Director features: Routing and traffic + management table. This resource defines mappings from + hostnames and URL paths to either a backend service or a + backend bucket. To use the global urlMaps resource, the + backend service must have a loadBalancingScheme of + either EXTERNAL or INTERNAL_SELF_MANAGED. To use the + regionUrlMaps resource, the backend service must have a + loadBalancingScheme of INTERNAL_MANAGED. For more + information, read URL Map Concepts. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/urlMaps/{url_map}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.UrlMap() + pb_resp = compute.UrlMap.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(UrlMapsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. 
+
+            Args:
+                request (~.compute.InsertUrlMapRequest):
+                    The request object. A request message for UrlMaps.Insert.
+                    See the method description for details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.compute.Operation:
+                    Represents an Operation resource. Google Compute Engine
+                    has three Operation resources: \*
+                    `Global <https://cloud.google.com/compute/docs/reference/rest/v1/globalOperations>`__
+                    \*
+                    `Regional <https://cloud.google.com/compute/docs/reference/rest/v1/regionOperations>`__
+                    \*
+                    `Zonal <https://cloud.google.com/compute/docs/reference/rest/v1/zoneOperations>`__
+                    You can use an operation resource to manage asynchronous
+                    API requests. For more information, read Handling API
+                    responses. Operations can be global, regional or zonal.
+                    - For global operations, use the ``globalOperations``
+                    resource. - For regional operations, use the
+                    ``regionOperations`` resource. - For zonal operations,
+                    use the ``zonalOperations`` resource. For more
+                    information, read Global, Regional, and Zonal Resources.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/urlMaps', + 'body': 'url_map_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _InvalidateCache(UrlMapsRestStub): + def __hash__(self): + return hash("InvalidateCache") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InvalidateCacheUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the invalidate cache method over HTTP. + + Args: + request (~.compute.InvalidateCacheUrlMapRequest): + The request object. A request message for + UrlMaps.InvalidateCache. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/urlMaps/{url_map}/invalidateCache', + 'body': 'cache_invalidation_rule_resource', + }, + ] + request, metadata = self._interceptor.pre_invalidate_cache(request, metadata) + pb_request = compute.InvalidateCacheUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_invalidate_cache(resp) + return resp + + class _List(UrlMapsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListUrlMapsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.UrlMapList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListUrlMapsRequest): + The request object. A request message for UrlMaps.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.UrlMapList: + Contains a list of UrlMap resources. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/global/urlMaps', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListUrlMapsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.UrlMapList() + pb_resp = compute.UrlMapList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Patch(UrlMapsRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.PatchUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the patch method over HTTP. 
+ + Args: + request (~.compute.PatchUrlMapRequest): + The request object. A request message for UrlMaps.Patch. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/compute/v1/projects/{project}/global/urlMaps/{url_map}', + 'body': 'url_map_resource', + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + class _Update(UrlMapsRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.UpdateUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateUrlMapRequest): + The request object. A request message for UrlMaps.Update. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/compute/v1/projects/{project}/global/urlMaps/{url_map}', + 'body': 'url_map_resource', + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + class _Validate(UrlMapsRestStub): + def __hash__(self): + return hash("Validate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ValidateUrlMapRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.UrlMapsValidateResponse: + r"""Call the validate method over HTTP. + + Args: + request (~.compute.ValidateUrlMapRequest): + The request object. A request message for + UrlMaps.Validate. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.UrlMapsValidateResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/global/urlMaps/{url_map}/validate', + 'body': 'url_maps_validate_request_resource', + }, + ] + request, metadata = self._interceptor.pre_validate(request, metadata) + pb_request = compute.ValidateUrlMapRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.UrlMapsValidateResponse() + pb_resp = compute.UrlMapsValidateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_validate(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListUrlMapsRequest], + compute.UrlMapsAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteUrlMapRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetUrlMapRequest], + compute.UrlMap]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertUrlMapRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def invalidate_cache(self) -> Callable[ + [compute.InvalidateCacheUrlMapRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InvalidateCache(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListUrlMapsRequest], + compute.UrlMapList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch(self) -> Callable[ + [compute.PatchUrlMapRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[ + [compute.UpdateUrlMapRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate(self) -> Callable[ + [compute.ValidateUrlMapRequest], + compute.UrlMapsValidateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Validate(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'UrlMapsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/__init__.py new file mode 100644 index 000000000..db79f1e1a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import VpnGatewaysClient + +__all__ = ( + 'VpnGatewaysClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/client.py new file mode 100644 index 000000000..8ae4ccc52 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/client.py @@ -0,0 +1,1819 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.vpn_gateways import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import VpnGatewaysTransport, DEFAULT_CLIENT_INFO +from .transports.rest import VpnGatewaysRestTransport + + +class VpnGatewaysClientMeta(type): + """Metaclass for the VpnGateways client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[VpnGatewaysTransport]] + _transport_registry["rest"] = VpnGatewaysRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[VpnGatewaysTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class VpnGatewaysClient(metaclass=VpnGatewaysClientMeta):
+    """The VpnGateways API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            VpnGatewaysClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VpnGatewaysClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> VpnGatewaysTransport: + """Returns the transport used by the client instance. + + Returns: + VpnGatewaysTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization 
string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, VpnGatewaysTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the vpn gateways client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, VpnGatewaysTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, VpnGatewaysTransport): + # transport is a VpnGatewaysTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListVpnGatewaysRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of VPN gateways. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListVpnGatewaysRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListVpnGatewaysRequest, dict]): + The request object. A request message for + VpnGateways.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.vpn_gateways.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListVpnGatewaysRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListVpnGatewaysRequest): + request = compute.AggregatedListVpnGatewaysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified VPN gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteVpnGatewayRequest( + project="project_value", + region="region_value", + vpn_gateway="vpn_gateway_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_gateway (str): + Name of the VPN gateway to delete. + This corresponds to the ``vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteVpnGatewayRequest): + request = compute.DeleteVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_gateway is not None: + request.vpn_gateway = vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("vpn_gateway", request.vpn_gateway), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified VPN gateway. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteVpnGatewayRequest( + project="project_value", + region="region_value", + vpn_gateway="vpn_gateway_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_gateway (str): + Name of the VPN gateway to delete. + This corresponds to the ``vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteVpnGatewayRequest): + request = compute.DeleteVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_gateway is not None: + request.vpn_gateway = vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("vpn_gateway", request.vpn_gateway), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.VpnGateway: + r"""Returns the specified VPN gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.GetVpnGatewayRequest( + project="project_value", + region="region_value", + vpn_gateway="vpn_gateway_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_gateway (str): + Name of the VPN gateway to return. + This corresponds to the ``vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.VpnGateway: + Represents a HA VPN gateway. HA VPN + is a high-availability (HA) Cloud VPN + solution that lets you securely connect + your on-premises network to your Google + Cloud Virtual Private Cloud network + through an IPsec VPN connection in a + single region. For more information + about Cloud HA VPN solutions, see Cloud + VPN topologies . 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetVpnGatewayRequest): + request = compute.GetVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_gateway is not None: + request.vpn_gateway = vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("vpn_gateway", request.vpn_gateway), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_status(self, + request: Optional[Union[compute.GetStatusVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_gateway: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.VpnGatewaysGetStatusResponse: + r"""Returns the status for the specified VPN gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_status(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.GetStatusVpnGatewayRequest( + project="project_value", + region="region_value", + vpn_gateway="vpn_gateway_value", + ) + + # Make the request + response = client.get_status(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetStatusVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.GetStatus. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_gateway (str): + Name of the VPN gateway to return. 
+ This corresponds to the ``vpn_gateway`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.VpnGatewaysGetStatusResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, vpn_gateway]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetStatusVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetStatusVpnGatewayRequest): + request = compute.GetStatusVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_gateway is not None: + request.vpn_gateway = vpn_gateway + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_status] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("vpn_gateway", request.vpn_gateway), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_gateway_resource: Optional[compute.VpnGateway] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a VPN gateway in the specified project and + region using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertVpnGatewayRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. 
+ This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_gateway_resource (google.cloud.compute_v1.types.VpnGateway): + The body resource for this request + This corresponds to the ``vpn_gateway_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, vpn_gateway_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertVpnGatewayRequest): + request = compute.InsertVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_gateway_resource is not None: + request.vpn_gateway_resource = vpn_gateway_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert(self, + request: Optional[Union[compute.InsertVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_gateway_resource: Optional[compute.VpnGateway] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a VPN gateway in the specified project and + region using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertVpnGatewayRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.Insert. See the method + description for details. 
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_gateway_resource (google.cloud.compute_v1.types.VpnGateway): + The body resource for this request + This corresponds to the ``vpn_gateway_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, vpn_gateway_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertVpnGatewayRequest): + request = compute.InsertVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_gateway_resource is not None: + request.vpn_gateway_resource = vpn_gateway_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list(self, + request: Optional[Union[compute.ListVpnGatewaysRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of VPN gateways available to the + specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.ListVpnGatewaysRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListVpnGatewaysRequest, dict]): + The request object. A request message for + VpnGateways.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.vpn_gateways.pagers.ListPager: + Contains a list of VpnGateway + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListVpnGatewaysRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListVpnGatewaysRequest): + request = compute.ListVpnGatewaysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on a VpnGateway. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsVpnGatewayRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ region (str):
+ The region for this request.
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ resource (str):
+ Name or id of the resource for this
+ request.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest):
+ The body resource for this request
+ This corresponds to the ``region_set_labels_request_resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, resource, region_set_labels_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetLabelsVpnGatewayRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetLabelsVpnGatewayRequest):
+ request = compute.SetLabelsVpnGatewayRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on a VpnGateway. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsVpnGatewayRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+ Returns:
+ google.api_core.extended_operation.ExtendedOperation:
+ An object representing an extended
+ long-running operation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project, region, resource, region_set_labels_request_resource])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a compute.SetLabelsVpnGatewayRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, compute.SetLabelsVpnGatewayRequest):
+ request = compute.SetLabelsVpnGatewayRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project is not None:
+ request.project = project
+ if region is not None:
+ request.region = region
+ if resource is not None:
+ request.resource = resource
+ if region_set_labels_request_resource is not None:
+ request.region_set_labels_request_resource = region_set_labels_request_resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_labels]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project", request.project),
+ ("region", request.region),
+ ("resource", request.resource),
+ )),
+ )
+
+ # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[compute.TestIamPermissionsVpnGatewayRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[compute.TestPermissionsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsVpnGatewayRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsVpnGatewayRequest, dict]): + The request object. A request message for + VpnGateways.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, test_permissions_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.TestIamPermissionsVpnGatewayRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.TestIamPermissionsVpnGatewayRequest): + request = compute.TestIamPermissionsVpnGatewayRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = test_permissions_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "VpnGatewaysClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "VpnGatewaysClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/pagers.py new file mode 100644 index 000000000..ac66af25c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.VpnGatewayAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.VpnGatewayAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.VpnGatewayAggregatedList], + request: compute.AggregatedListVpnGatewaysRequest, + response: compute.VpnGatewayAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListVpnGatewaysRequest): + The initial request object. + response (google.cloud.compute_v1.types.VpnGatewayAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListVpnGatewaysRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.VpnGatewayAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.VpnGatewaysScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.VpnGatewaysScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.VpnGatewayList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.VpnGatewayList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.VpnGatewayList], + request: compute.ListVpnGatewaysRequest, + response: compute.VpnGatewayList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListVpnGatewaysRequest): + The initial request object. 
+ response (google.cloud.compute_v1.types.VpnGatewayList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListVpnGatewaysRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.VpnGatewayList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.VpnGateway]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/transports/__init__.py new file mode 100644 index 000000000..80cbfd091 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import VpnGatewaysTransport +from .rest import VpnGatewaysRestTransport +from .rest import VpnGatewaysRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[VpnGatewaysTransport]] +_transport_registry['rest'] = VpnGatewaysRestTransport + +__all__ = ( + 'VpnGatewaysTransport', + 'VpnGatewaysRestTransport', + 'VpnGatewaysRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/transports/base.py new file mode 100644 index 000000000..3869555ba --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_gateways/transports/base.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import abc
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.compute_v1 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1.types import compute
from google.cloud.compute_v1.services import region_operations

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class VpnGatewaysTransport(abc.ABC):
    """Abstract transport class for VpnGateways."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The intended audience for credentials
                that support it (e.g. GDC-H); defaults to ``host`` when unset.
        """
        # Lazily-populated cache of auxiliary clients used to poll extended
        # operations (see `_region_operations_client`). `Any` is required
        # here: this annotation is evaluated at runtime, so it must be
        # imported from `typing` above.
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # Explicit credentials and a credentials file are mutually exclusive.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            # NOTE(review): nested under the ADC branch so the audience is
            # only applied to environment-derived credentials, matching the
            # comment above — confirm against the upstream generator output.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        """Precompute the gapic-wrapped form of every RPC method.

        Keyed by the stub callable itself; stub instances compare equal by
        their dataclass fields, so later lookups by a fresh property access
        still hit the cache.
        """
        rpcs = (
            self.aggregated_list,
            self.delete,
            self.get,
            self.get_status,
            self.insert,
            self.list,
            self.set_labels,
            self.test_iam_permissions,
        )
        self._wrapped_methods = {
            rpc: gapic_v1.method.wrap_method(
                rpc,
                default_timeout=None,
                client_info=client_info,
            )
            for rpc in rpcs
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def aggregated_list(self) -> Callable[
            [compute.AggregatedListVpnGatewaysRequest],
            Union[
                compute.VpnGatewayAggregatedList,
                Awaitable[compute.VpnGatewayAggregatedList]
            ]]:
        raise NotImplementedError()

    @property
    def delete(self) -> Callable[
            [compute.DeleteVpnGatewayRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetVpnGatewayRequest],
            Union[
                compute.VpnGateway,
                Awaitable[compute.VpnGateway]
            ]]:
        raise NotImplementedError()

    @property
    def get_status(self) -> Callable[
            [compute.GetStatusVpnGatewayRequest],
            Union[
                compute.VpnGatewaysGetStatusResponse,
                Awaitable[compute.VpnGatewaysGetStatusResponse]
            ]]:
        raise NotImplementedError()

    @property
    def insert(self) -> Callable[
            [compute.InsertVpnGatewayRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListVpnGatewaysRequest],
            Union[
                compute.VpnGatewayList,
                Awaitable[compute.VpnGatewayList]
            ]]:
        raise NotImplementedError()

    @property
    def set_labels(self) -> Callable[
            [compute.SetLabelsVpnGatewayRequest],
            Union[
                compute.Operation,
                Awaitable[compute.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def test_iam_permissions(self) -> Callable[
            [compute.TestIamPermissionsVpnGatewayRequest],
            Union[
                compute.TestPermissionsResponse,
                Awaitable[compute.TestPermissionsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()

    @property
    def _region_operations_client(self) -> region_operations.RegionOperationsClient:
        # Lazily create (and cache) the client used to poll regional
        # extended operations on behalf of methods returning an Operation.
        ex_op_service = self._extended_operations_services.get("region_operations")
        if not ex_op_service:
            ex_op_service = region_operations.RegionOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["region_operations"] = ex_op_service

        return ex_op_service


__all__ = (
    'VpnGatewaysTransport',
)
from google.auth.transport.requests import AuthorizedSession  # type: ignore
import json  # type: ignore
import grpc  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.api_core import rest_helpers
from google.api_core import rest_streaming
from google.api_core import path_template
from google.api_core import gapic_v1

from google.protobuf import json_format
from requests import __version__ as requests_version
import dataclasses
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


from google.cloud.compute_v1.types import compute

from .base import VpnGatewaysTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)


class VpnGatewaysRestInterceptor:
    """Interceptor for VpnGateways.

    An interceptor is a collection of hooks that let callers observe or
    rewrite every RPC made through the REST transport: each method has a
    ``pre_<method>`` hook (request + metadata, before sending) and a
    ``post_<method>`` hook (response, before it reaches user code).
    Typical uses are logging, request validation, and response scrubbing.

    Subclass this, override the hooks you care about, and pass an instance
    to the transport:

    .. code-block:: python
        class MyCustomVpnGatewaysInterceptor(VpnGatewaysRestInterceptor):
            def pre_delete(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_delete(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = VpnGatewaysRestTransport(interceptor=MyCustomVpnGatewaysInterceptor())
        client = VpnGatewaysClient(transport=transport)

    Every default implementation below is a pass-through.
    """

    def pre_aggregated_list(
        self,
        request: compute.AggregatedListVpnGatewaysRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.AggregatedListVpnGatewaysRequest, Sequence[Tuple[str, str]]]:
        """Hook invoked before ``aggregated_list`` is sent to the server."""
        return request, metadata

    def post_aggregated_list(
        self, response: compute.VpnGatewayAggregatedList
    ) -> compute.VpnGatewayAggregatedList:
        """Hook invoked with the ``aggregated_list`` response before it reaches user code."""
        return response

    def pre_delete(
        self,
        request: compute.DeleteVpnGatewayRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.DeleteVpnGatewayRequest, Sequence[Tuple[str, str]]]:
        """Hook invoked before ``delete`` is sent to the server."""
        return request, metadata

    def post_delete(self, response: compute.Operation) -> compute.Operation:
        """Hook invoked with the ``delete`` response before it reaches user code."""
        return response

    def pre_get(
        self,
        request: compute.GetVpnGatewayRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.GetVpnGatewayRequest, Sequence[Tuple[str, str]]]:
        """Hook invoked before ``get`` is sent to the server."""
        return request, metadata

    def post_get(self, response: compute.VpnGateway) -> compute.VpnGateway:
        """Hook invoked with the ``get`` response before it reaches user code."""
        return response

    def pre_get_status(
        self,
        request: compute.GetStatusVpnGatewayRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.GetStatusVpnGatewayRequest, Sequence[Tuple[str, str]]]:
        """Hook invoked before ``get_status`` is sent to the server."""
        return request, metadata

    def post_get_status(
        self, response: compute.VpnGatewaysGetStatusResponse
    ) -> compute.VpnGatewaysGetStatusResponse:
        """Hook invoked with the ``get_status`` response before it reaches user code."""
        return response

    def pre_insert(
        self,
        request: compute.InsertVpnGatewayRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.InsertVpnGatewayRequest, Sequence[Tuple[str, str]]]:
        """Hook invoked before ``insert`` is sent to the server."""
        return request, metadata

    def post_insert(self, response: compute.Operation) -> compute.Operation:
        """Hook invoked with the ``insert`` response before it reaches user code."""
        return response

    def pre_list(
        self,
        request: compute.ListVpnGatewaysRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.ListVpnGatewaysRequest, Sequence[Tuple[str, str]]]:
        """Hook invoked before ``list`` is sent to the server."""
        return request, metadata

    def post_list(self, response: compute.VpnGatewayList) -> compute.VpnGatewayList:
        """Hook invoked with the ``list`` response before it reaches user code."""
        return response

    def pre_set_labels(
        self,
        request: compute.SetLabelsVpnGatewayRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.SetLabelsVpnGatewayRequest, Sequence[Tuple[str, str]]]:
        """Hook invoked before ``set_labels`` is sent to the server."""
        return request, metadata

    def post_set_labels(self, response: compute.Operation) -> compute.Operation:
        """Hook invoked with the ``set_labels`` response before it reaches user code."""
        return response

    def pre_test_iam_permissions(
        self,
        request: compute.TestIamPermissionsVpnGatewayRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.TestIamPermissionsVpnGatewayRequest, Sequence[Tuple[str, str]]]:
        """Hook invoked before ``test_iam_permissions`` is sent to the server."""
        return request, metadata

    def post_test_iam_permissions(
        self, response: compute.TestPermissionsResponse
    ) -> compute.TestPermissionsResponse:
        """Hook invoked with the ``test_iam_permissions`` response before it reaches user code."""
        return response


@dataclasses.dataclass
class VpnGatewaysRestStub:
    # Shared state handed to every per-method REST stub; dataclass equality
    # (field-wise) is what lets freshly-built stubs hit the wrapped-method
    # cache in the base transport.
    _session: AuthorizedSession
    _host: str
    _interceptor: VpnGatewaysRestInterceptor
+ + The VpnGateways API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[VpnGatewaysRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or VpnGatewaysRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(VpnGatewaysRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, 
+ request: compute.AggregatedListVpnGatewaysRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.VpnGatewayAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListVpnGatewaysRequest): + The request object. A request message for + VpnGateways.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.VpnGatewayAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/vpnGateways', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListVpnGatewaysRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VpnGatewayAggregatedList() + pb_resp = compute.VpnGatewayAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(VpnGatewaysRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteVpnGatewayRequest): + The request object. A request message for + VpnGateways.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(VpnGatewaysRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.VpnGateway: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetVpnGatewayRequest): + The request object. A request message for + VpnGateways.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.VpnGateway: + Represents a HA VPN gateway. HA VPN + is a high-availability (HA) Cloud VPN + solution that lets you securely connect + your on-premises network to your Google + Cloud Virtual Private Cloud network + through an IPsec VPN connection in a + single region. For more information + about Cloud HA VPN solutions, see Cloud + VPN topologies . 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VpnGateway() + pb_resp = compute.VpnGateway.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetStatus(VpnGatewaysRestStub): + def __hash__(self): + return hash("GetStatus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetStatusVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.VpnGatewaysGetStatusResponse: + r"""Call the get status method over HTTP. + + Args: + request (~.compute.GetStatusVpnGatewayRequest): + The request object. A request message for + VpnGateways.GetStatus. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.VpnGatewaysGetStatusResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}/getStatus', + }, + ] + request, metadata = self._interceptor.pre_get_status(request, metadata) + pb_request = compute.GetStatusVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VpnGatewaysGetStatusResponse() + pb_resp = compute.VpnGatewaysGetStatusResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_status(resp) + return resp + + class _Insert(VpnGatewaysRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertVpnGatewayRequest): + The request object. A request message for + VpnGateways.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnGateways', + 'body': 'vpn_gateway_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(VpnGatewaysRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListVpnGatewaysRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.VpnGatewayList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListVpnGatewaysRequest): + The request object. A request message for + VpnGateways.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.VpnGatewayList: + Contains a list of VpnGateway + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnGateways', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListVpnGatewaysRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VpnGatewayList() + pb_resp = compute.VpnGatewayList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetLabels(VpnGatewaysRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsVpnGatewayRequest): + The request object. A request message for + VpnGateways.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/setLabels', + 'body': 'region_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + class _TestIamPermissions(VpnGatewaysRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.TestIamPermissionsVpnGatewayRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsVpnGatewayRequest): + The request object. A request message for + VpnGateways.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/testIamPermissions', + 'body': 'test_permissions_request_resource', + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + pb_request = compute.TestIamPermissionsVpnGatewayRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListVpnGatewaysRequest], + compute.VpnGatewayAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteVpnGatewayRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetVpnGatewayRequest], + compute.VpnGateway]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_status(self) -> Callable[ + [compute.GetStatusVpnGatewayRequest], + compute.VpnGatewaysGetStatusResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStatus(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertVpnGatewayRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListVpnGatewaysRequest], + compute.VpnGatewayList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsVpnGatewayRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [compute.TestIamPermissionsVpnGatewayRequest], + compute.TestPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'VpnGatewaysRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/__init__.py new file mode 100644 index 000000000..0bfd676d5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import VpnTunnelsClient + +__all__ = ( + 'VpnTunnelsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/client.py new file mode 100644 index 000000000..fcc2e43ef --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/client.py @@ -0,0 +1,1569 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import extended_operation # type: ignore +from google.cloud.compute_v1.services.vpn_tunnels import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import VpnTunnelsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import VpnTunnelsRestTransport + + +class VpnTunnelsClientMeta(type): + """Metaclass for the VpnTunnels client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[VpnTunnelsTransport]] + _transport_registry["rest"] = VpnTunnelsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[VpnTunnelsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class VpnTunnelsClient(metaclass=VpnTunnelsClientMeta):
+    """The VpnTunnels API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "compute.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            VpnTunnelsClient: The constructed client.
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VpnTunnelsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> VpnTunnelsTransport: + """Returns the transport used by the client instance. + + Returns: + VpnTunnelsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" 
+ return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, VpnTunnelsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the vpn tunnels client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, VpnTunnelsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, VpnTunnelsTransport): + # transport is a VpnTunnelsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def aggregated_list(self, + request: Optional[Union[compute.AggregatedListVpnTunnelsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of VPN tunnels. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListVpnTunnelsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListVpnTunnelsRequest, dict]): + The request object. A request message for + VpnTunnels.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.vpn_tunnels.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.AggregatedListVpnTunnelsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.AggregatedListVpnTunnelsRequest): + request = compute.AggregatedListVpnTunnelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_unary(self, + request: Optional[Union[compute.DeleteVpnTunnelRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_tunnel: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified VpnTunnel resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteVpnTunnelRequest( + project="project_value", + region="region_value", + vpn_tunnel="vpn_tunnel_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteVpnTunnelRequest, dict]): + The request object. A request message for + VpnTunnels.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_tunnel (str): + Name of the VpnTunnel resource to + delete. + + This corresponds to the ``vpn_tunnel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, vpn_tunnel]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteVpnTunnelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteVpnTunnelRequest): + request = compute.DeleteVpnTunnelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_tunnel is not None: + request.vpn_tunnel = vpn_tunnel + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("vpn_tunnel", request.vpn_tunnel), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete(self, + request: Optional[Union[compute.DeleteVpnTunnelRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_tunnel: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified VpnTunnel resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteVpnTunnelRequest( + project="project_value", + region="region_value", + vpn_tunnel="vpn_tunnel_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteVpnTunnelRequest, dict]): + The request object. A request message for + VpnTunnels.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_tunnel (str): + Name of the VpnTunnel resource to + delete. + + This corresponds to the ``vpn_tunnel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, vpn_tunnel]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteVpnTunnelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteVpnTunnelRequest): + request = compute.DeleteVpnTunnelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_tunnel is not None: + request.vpn_tunnel = vpn_tunnel + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("vpn_tunnel", request.vpn_tunnel), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetVpnTunnelRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_tunnel: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.VpnTunnel: + r"""Returns the specified VpnTunnel resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.GetVpnTunnelRequest( + project="project_value", + region="region_value", + vpn_tunnel="vpn_tunnel_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetVpnTunnelRequest, dict]): + The request object. A request message for VpnTunnels.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_tunnel (str): + Name of the VpnTunnel resource to + return. + + This corresponds to the ``vpn_tunnel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.VpnTunnel: + Represents a Cloud VPN Tunnel + resource. For more information about + VPN, read the the Cloud VPN Overview. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region, vpn_tunnel]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetVpnTunnelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetVpnTunnelRequest): + request = compute.GetVpnTunnelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_tunnel is not None: + request.vpn_tunnel = vpn_tunnel + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("vpn_tunnel", request.vpn_tunnel), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary(self, + request: Optional[Union[compute.InsertVpnTunnelRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_tunnel_resource: Optional[compute.VpnTunnel] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a VpnTunnel resource in the specified project + and region using the data included in the request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.InsertVpnTunnelRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertVpnTunnelRequest, dict]): + The request object. A request message for + VpnTunnels.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpn_tunnel_resource (google.cloud.compute_v1.types.VpnTunnel): + The body resource for this request + This corresponds to the ``vpn_tunnel_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, vpn_tunnel_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertVpnTunnelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertVpnTunnelRequest): + request = compute.InsertVpnTunnelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_tunnel_resource is not None: + request.vpn_tunnel_resource = vpn_tunnel_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert(self, + request: Optional[Union[compute.InsertVpnTunnelRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + vpn_tunnel_resource: Optional[compute.VpnTunnel] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a VpnTunnel resource in the specified project + and region using the data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.InsertVpnTunnelRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertVpnTunnelRequest, dict]): + The request object. A request message for + VpnTunnels.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ vpn_tunnel_resource (google.cloud.compute_v1.types.VpnTunnel): + The body resource for this request + This corresponds to the ``vpn_tunnel_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, vpn_tunnel_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.InsertVpnTunnelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.InsertVpnTunnelRequest): + request = compute.InsertVpnTunnelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if vpn_tunnel_resource is not None: + request.vpn_tunnel_resource = vpn_tunnel_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListVpnTunnelsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of VpnTunnel resources contained in + the specified project and region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.ListVpnTunnelsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListVpnTunnelsRequest, dict]): + The request object. A request message for + VpnTunnels.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.vpn_tunnels.pagers.ListPager: + Contains a list of VpnTunnel + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, region]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListVpnTunnelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListVpnTunnelsRequest): + request = compute.ListVpnTunnelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_labels_unary(self, + request: Optional[Union[compute.SetLabelsVpnTunnelRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Sets the labels on a VpnTunnel. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsVpnTunnelRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsVpnTunnelRequest, dict]): + The request object. A request message for + VpnTunnels.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsVpnTunnelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsVpnTunnelRequest): + request = compute.SetLabelsVpnTunnelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels(self, + request: Optional[Union[compute.SetLabelsVpnTunnelRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[compute.RegionSetLabelsRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on a VpnTunnel. To learn more about + labels, read the Labeling Resources documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsVpnTunnelRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsVpnTunnelRequest, dict]): + The request object. A request message for + VpnTunnels.SetLabels. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, region, resource, region_set_labels_request_resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.SetLabelsVpnTunnelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.SetLabelsVpnTunnelRequest): + request = compute.SetLabelsVpnTunnelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = region_set_labels_request_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "VpnTunnelsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "VpnTunnelsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/pagers.py new file mode 100644 index 000000000..0fdc01546 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.VpnTunnelAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.VpnTunnelAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., compute.VpnTunnelAggregatedList], + request: compute.AggregatedListVpnTunnelsRequest, + response: compute.VpnTunnelAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListVpnTunnelsRequest): + The initial request object. + response (google.cloud.compute_v1.types.VpnTunnelAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.AggregatedListVpnTunnelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.VpnTunnelAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.VpnTunnelsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.VpnTunnelsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.VpnTunnelList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.compute_v1.types.VpnTunnelList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.VpnTunnelList], + request: compute.ListVpnTunnelsRequest, + response: compute.VpnTunnelList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListVpnTunnelsRequest): + The initial request object. + response (google.cloud.compute_v1.types.VpnTunnelList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListVpnTunnelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.VpnTunnelList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.VpnTunnel]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/__init__.py new file mode 100644 index 000000000..59414c8f6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import VpnTunnelsTransport +from .rest import VpnTunnelsRestTransport +from .rest import VpnTunnelsRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[VpnTunnelsTransport]] +_transport_registry['rest'] = VpnTunnelsRestTransport + +__all__ = ( + 'VpnTunnelsTransport', + 'VpnTunnelsRestTransport', + 'VpnTunnelsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/base.py new file mode 100644 index 000000000..9916379b3 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/base.py @@ -0,0 +1,233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute +from google.cloud.compute_v1.services import region_operations + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class VpnTunnelsTransport(abc.ABC): + """Abstract transport class for VpnTunnels.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListVpnTunnelsRequest], + Union[ + compute.VpnTunnelAggregatedList, + Awaitable[compute.VpnTunnelAggregatedList] + ]]: + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteVpnTunnelRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetVpnTunnelRequest], + Union[ + compute.VpnTunnel, + Awaitable[compute.VpnTunnel] + ]]: + raise NotImplementedError() + + @property + def insert(self) -> Callable[ + [compute.InsertVpnTunnelRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListVpnTunnelsRequest], + Union[ + compute.VpnTunnelList, + Awaitable[compute.VpnTunnelList] + ]]: + raise NotImplementedError() + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsVpnTunnelRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ( + 'VpnTunnelsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py new file mode 100644 index 000000000..5784dd98d --- /dev/null +++ 
b/owl-bot-staging/v1/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py @@ -0,0 +1,912 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import VpnTunnelsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class 
VpnTunnelsRestInterceptor: + """Interceptor for VpnTunnels. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the VpnTunnelsRestTransport. + + .. code-block:: python + class MyCustomVpnTunnelsInterceptor(VpnTunnelsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + transport = VpnTunnelsRestTransport(interceptor=MyCustomVpnTunnelsInterceptor()) + client = 
VpnTunnelsClient(transport=transport) + + + """ + def pre_aggregated_list(self, request: compute.AggregatedListVpnTunnelsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.AggregatedListVpnTunnelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the VpnTunnels server. + """ + return request, metadata + + def post_aggregated_list(self, response: compute.VpnTunnelAggregatedList) -> compute.VpnTunnelAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the VpnTunnels server but before + it is returned to user code. + """ + return response + def pre_delete(self, request: compute.DeleteVpnTunnelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteVpnTunnelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the VpnTunnels server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the VpnTunnels server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetVpnTunnelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetVpnTunnelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the VpnTunnels server. + """ + return request, metadata + + def post_get(self, response: compute.VpnTunnel) -> compute.VpnTunnel: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the VpnTunnels server but before + it is returned to user code. 
+ """ + return response + def pre_insert(self, request: compute.InsertVpnTunnelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.InsertVpnTunnelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the VpnTunnels server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the VpnTunnels server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListVpnTunnelsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListVpnTunnelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the VpnTunnels server. + """ + return request, metadata + + def post_list(self, response: compute.VpnTunnelList) -> compute.VpnTunnelList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the VpnTunnels server but before + it is returned to user code. + """ + return response + def pre_set_labels(self, request: compute.SetLabelsVpnTunnelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.SetLabelsVpnTunnelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the VpnTunnels server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + Override in a subclass to manipulate the response + after it is returned by the VpnTunnels server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class VpnTunnelsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: VpnTunnelsRestInterceptor + + +class VpnTunnelsRestTransport(VpnTunnelsTransport): + """REST backend transport for VpnTunnels. + + The VpnTunnels API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[VpnTunnelsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or VpnTunnelsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(VpnTunnelsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.AggregatedListVpnTunnelsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.VpnTunnelAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListVpnTunnelsRequest): + The request object. A request message for + VpnTunnels.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.VpnTunnelAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/aggregated/vpnTunnels', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListVpnTunnelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VpnTunnelAggregatedList() + pb_resp = compute.VpnTunnelAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(VpnTunnelsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteVpnTunnelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteVpnTunnelRequest): + The request object. A request message for + VpnTunnels.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteVpnTunnelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(VpnTunnelsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetVpnTunnelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.VpnTunnel: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetVpnTunnelRequest): + The request object. A request message for VpnTunnels.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.VpnTunnel: + Represents a Cloud VPN Tunnel + resource. For more information about + VPN, read the the Cloud VPN Overview. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetVpnTunnelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VpnTunnel() + pb_resp = compute.VpnTunnel.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(VpnTunnelsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.InsertVpnTunnelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertVpnTunnelRequest): + The request object. A request message for + VpnTunnels.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnTunnels', + 'body': 'vpn_tunnel_resource', + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertVpnTunnelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(VpnTunnelsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListVpnTunnelsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.VpnTunnelList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListVpnTunnelsRequest): + The request object. A request message for + VpnTunnels.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.VpnTunnelList: + Contains a list of VpnTunnel + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnTunnels', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListVpnTunnelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VpnTunnelList() + pb_resp = compute.VpnTunnelList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _SetLabels(VpnTunnelsRestStub): + def __hash__(self): + return hash("SetLabels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.SetLabelsVpnTunnelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsVpnTunnelRequest): + The request object. A request message for + VpnTunnels.SetLabels. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{resource}/setLabels', + 'body': 'region_set_labels_request_resource', + }, + ] + request, metadata = self._interceptor.pre_set_labels(request, metadata) + pb_request = compute.SetLabelsVpnTunnelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_labels(resp) + return resp + + @property + def aggregated_list(self) -> Callable[ + [compute.AggregatedListVpnTunnelsRequest], + compute.VpnTunnelAggregatedList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[ + [compute.DeleteVpnTunnelRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetVpnTunnelRequest], + compute.VpnTunnel]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[ + [compute.InsertVpnTunnelRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListVpnTunnelsRequest], + compute.VpnTunnelList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels(self) -> Callable[ + [compute.SetLabelsVpnTunnelRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'VpnTunnelsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/__init__.py new file mode 100644 index 000000000..581b319d2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ZoneOperationsClient + +__all__ = ( + 'ZoneOperationsClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/client.py new file mode 100644 index 000000000..3017b777b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/client.py @@ -0,0 +1,920 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.zone_operations import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import ZoneOperationsTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ZoneOperationsRestTransport + + +class ZoneOperationsClientMeta(type): + """Metaclass for the ZoneOperations client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ZoneOperationsTransport]] + _transport_registry["rest"] = ZoneOperationsRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ZoneOperationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ZoneOperationsClient(metaclass=ZoneOperationsClientMeta): + """The ZoneOperations API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ZoneOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ZoneOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ZoneOperationsTransport: + """Returns the transport used by the client instance. + + Returns: + ZoneOperationsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ZoneOperationsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the zone operations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ZoneOperationsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ZoneOperationsTransport): + # transport is a ZoneOperationsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def delete(self, + request: Optional[Union[compute.DeleteZoneOperationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + operation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.DeleteZoneOperationResponse: + r"""Deletes the specified zone-specific Operations + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ZoneOperationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteZoneOperationRequest( + operation="operation_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteZoneOperationRequest, dict]): + The request object. A request message for + ZoneOperations.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation (str): + Name of the Operations resource to + delete. + + This corresponds to the ``operation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.DeleteZoneOperationResponse: + A response message for + ZoneOperations.Delete. See the method + description for details. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, operation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.DeleteZoneOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.DeleteZoneOperationRequest): + request = compute.DeleteZoneOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if operation is not None: + request.operation = operation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("operation", request.operation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get(self, + request: Optional[Union[compute.GetZoneOperationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + operation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Retrieves the specified zone-specific Operations + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ZoneOperationsClient() + + # Initialize request argument(s) + request = compute_v1.GetZoneOperationRequest( + operation="operation_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetZoneOperationRequest, dict]): + The request object. A request message for + ZoneOperations.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation (str): + Name of the Operations resource to + return. + + This corresponds to the ``operation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + [Global](/compute/docs/reference/rest/v1/globalOperations) + \* + [Regional](/compute/docs/reference/rest/v1/regionOperations) + \* + [Zonal](/compute/docs/reference/rest/v1/zoneOperations) + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the globalOperations + resource. - For regional operations, use the + regionOperations resource. - For zonal operations, use + the zonalOperations resource. For more information, read + Global, Regional, and Zonal Resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, operation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetZoneOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetZoneOperationRequest): + request = compute.GetZoneOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if operation is not None: + request.operation = operation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("operation", request.operation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListZoneOperationsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of Operation resources contained + within the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ZoneOperationsClient() + + # Initialize request argument(s) + request = compute_v1.ListZoneOperationsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListZoneOperationsRequest, dict]): + The request object. A request message for + ZoneOperations.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + Name of the zone for request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.zone_operations.pagers.ListPager: + Contains a list of Operation + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListZoneOperationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListZoneOperationsRequest): + request = compute.ListZoneOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def wait(self, + request: Optional[Union[compute.WaitZoneOperationRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + operation: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Waits for the specified Operation resource to return as ``DONE`` + or for the request to approach the 2 minute deadline, and + retrieves the specified Operation resource. This method waits + for no more than the 2 minutes and then returns the current + state of the operation, which might be ``DONE`` or still in + progress. This method is called on a best-effort basis. + Specifically: - In uncommon cases, when the server is + overloaded, the request might return before the default deadline + is reached, or might return after zero seconds. - If the default + deadline is reached, there is no guarantee that the operation is + actually done when the method returns. Be prepared to retry if + the operation is not ``DONE``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_wait(): + # Create a client + client = compute_v1.ZoneOperationsClient() + + # Initialize request argument(s) + request = compute_v1.WaitZoneOperationRequest( + operation="operation_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.wait(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.WaitZoneOperationRequest, dict]): + The request object. A request message for + ZoneOperations.Wait. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation (str): + Name of the Operations resource to + return. + + This corresponds to the ``operation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Operation: + Represents an Operation resource. 
Google Compute Engine + has three Operation resources: \* + [Global](/compute/docs/reference/rest/v1/globalOperations) + \* + [Regional](/compute/docs/reference/rest/v1/regionOperations) + \* + [Zonal](/compute/docs/reference/rest/v1/zoneOperations) + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the globalOperations + resource. - For regional operations, use the + regionOperations resource. - For zonal operations, use + the zonalOperations resource. For more information, read + Global, Regional, and Zonal Resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, operation]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.WaitZoneOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.WaitZoneOperationRequest): + request = compute.WaitZoneOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if operation is not None: + request.operation = operation + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.wait] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + ("operation", request.operation), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ZoneOperationsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ZoneOperationsClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/pagers.py new file mode 100644 index 000000000..e74a83706 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.OperationList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.OperationList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., compute.OperationList], + request: compute.ListZoneOperationsRequest, + response: compute.OperationList, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListZoneOperationsRequest): + The initial request object. + response (google.cloud.compute_v1.types.OperationList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListZoneOperationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.OperationList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.Operation]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/__init__.py new file mode 100644 index 000000000..1846e9b7c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ZoneOperationsTransport +from .rest import ZoneOperationsRestTransport +from .rest import ZoneOperationsRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ZoneOperationsTransport]] +_transport_registry['rest'] = ZoneOperationsRestTransport + +__all__ = ( + 'ZoneOperationsTransport', + 'ZoneOperationsRestTransport', + 'ZoneOperationsRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/base.py new file mode 100644 index 000000000..69b8e5715 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/base.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.compute_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ZoneOperationsTransport(abc.ABC): + """Abstract transport class for ZoneOperations.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'compute.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.wait: gapic_v1.method.wrap_method( + self.wait, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete(self) -> Callable[ + [compute.DeleteZoneOperationRequest], + Union[ + compute.DeleteZoneOperationResponse, + Awaitable[compute.DeleteZoneOperationResponse] + ]]: + raise NotImplementedError() + + @property + def get(self) -> Callable[ + [compute.GetZoneOperationRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def list(self) -> Callable[ + [compute.ListZoneOperationsRequest], + Union[ + compute.OperationList, + Awaitable[compute.OperationList] + ]]: + raise NotImplementedError() + + @property + def wait(self) -> Callable[ + [compute.WaitZoneOperationRequest], + Union[ + compute.Operation, + Awaitable[compute.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'ZoneOperationsTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/rest.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/rest.py new file mode 100644 index 000000000..858b25fb6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zone_operations/transports/rest.py @@ -0,0 +1,660 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ZoneOperationsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ZoneOperationsRestInterceptor: + """Interceptor for ZoneOperations. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ZoneOperationsRestTransport. + + .. code-block:: python + class MyCustomZoneOperationsInterceptor(ZoneOperationsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_wait(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_wait(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ZoneOperationsRestTransport(interceptor=MyCustomZoneOperationsInterceptor()) + client = ZoneOperationsClient(transport=transport) + + + """ + def pre_delete(self, request: compute.DeleteZoneOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.DeleteZoneOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneOperations server. 
+ """ + return request, metadata + + def post_delete(self, response: compute.DeleteZoneOperationResponse) -> compute.DeleteZoneOperationResponse: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the ZoneOperations server but before + it is returned to user code. + """ + return response + def pre_get(self, request: compute.GetZoneOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetZoneOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneOperations server. + """ + return request, metadata + + def post_get(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the ZoneOperations server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListZoneOperationsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListZoneOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneOperations server. + """ + return request, metadata + + def post_list(self, response: compute.OperationList) -> compute.OperationList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the ZoneOperations server but before + it is returned to user code. + """ + return response + def pre_wait(self, request: compute.WaitZoneOperationRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.WaitZoneOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for wait + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneOperations server. 
+ """ + return request, metadata + + def post_wait(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for wait + + Override in a subclass to manipulate the response + after it is returned by the ZoneOperations server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ZoneOperationsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ZoneOperationsRestInterceptor + + +class ZoneOperationsRestTransport(ZoneOperationsTransport): + """REST backend transport for ZoneOperations. + + The ZoneOperations API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ZoneOperationsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ZoneOperationsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete(ZoneOperationsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.DeleteZoneOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.DeleteZoneOperationResponse: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteZoneOperationRequest): + The request object. A request message for + ZoneOperations.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata.
+ + Returns: + ~.compute.DeleteZoneOperationResponse: + A response message for + ZoneOperations.Delete. See the method + description for details. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/operations/{operation}', + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteZoneOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.DeleteZoneOperationResponse() + pb_resp = compute.DeleteZoneOperationResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(ZoneOperationsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.GetZoneOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetZoneOperationRequest): + The request object. A request message for + ZoneOperations.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/operations/{operation}', + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetZoneOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _List(ZoneOperationsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.ListZoneOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.OperationList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListZoneOperationsRequest): + The request object. A request message for + ZoneOperations.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.OperationList: + Contains a list of Operation + resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/operations', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListZoneOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.OperationList() + pb_resp = compute.OperationList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _Wait(ZoneOperationsRestStub): + def __hash__(self): + return hash("Wait") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: compute.WaitZoneOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> compute.Operation: + r"""Call the wait method over HTTP. 
+ + Args: + request (~.compute.WaitZoneOperationRequest): + The request object. A request message for + ZoneOperations.Wait. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zonalOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/compute/v1/projects/{project}/zones/{zone}/operations/{operation}/wait', + }, + ] + request, metadata = self._interceptor.pre_wait(request, metadata) + pb_request = compute.WaitZoneOperationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_wait(resp) + return resp + + @property + def delete(self) -> Callable[ + [compute.DeleteZoneOperationRequest], + compute.DeleteZoneOperationResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[ + [compute.GetZoneOperationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListZoneOperationsRequest], + compute.OperationList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def wait(self) -> Callable[ + [compute.WaitZoneOperationRequest], + compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Wait(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ZoneOperationsRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/__init__.py new file mode 100644 index 000000000..4f98d8867 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import ZonesClient + +__all__ = ( + 'ZonesClient', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/client.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/client.py new file mode 100644 index 000000000..c1d85c2b1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/client.py @@ -0,0 +1,616 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.compute_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.compute_v1.services.zones 
import pagers +from google.cloud.compute_v1.types import compute +from .transports.base import ZonesTransport, DEFAULT_CLIENT_INFO +from .transports.rest import ZonesRestTransport + + +class ZonesClientMeta(type): + """Metaclass for the Zones client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ZonesTransport]] + _transport_registry["rest"] = ZonesRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[ZonesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ZonesClient(metaclass=ZonesClientMeta): + """The Zones API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ZonesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ZonesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ZonesTransport: + """Returns the transport used by the client instance. + + Returns: + ZonesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def 
parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ZonesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the zones client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ZonesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ZonesTransport): + # transport is a ZonesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get(self, + request: Optional[Union[compute.GetZoneRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Zone: + r"""Returns the specified Zone resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ZonesClient() + + # Initialize request argument(s) + request = compute_v1.GetZoneRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetZoneRequest, dict]): + The request object. A request message for Zones.Get. 
See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone resource to return. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Zone: + Represents a Zone resource. A zone is + a deployment area. These deployment + areas are subsets of a region. For + example the zone us-east1-a is located + in the us-east1 region. For more + information, read Regions and Zones. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.GetZoneRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.GetZoneRequest): + request = compute.GetZoneRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + ("zone", request.zone), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list(self, + request: Optional[Union[compute.ListZonesRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of Zone resources available to the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ZonesClient() + + # Initialize request argument(s) + request = compute_v1.ListZonesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListZonesRequest, dict]): + The request object. A request message for Zones.List. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.zones.pagers.ListPager: + Contains a list of zone resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a compute.ListZonesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, compute.ListZonesRequest): + request = compute.ListZonesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ZonesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "ZonesClient", +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/pagers.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/pagers.py new file mode 100644 index 000000000..374ba77c2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/pagers.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. 
class ListPager:
    """A pager for iterating through ``list`` requests.

    Wraps an initial :class:`google.cloud.compute_v1.types.ZoneList`
    response and exposes an ``__iter__`` that walks its ``items`` field,
    transparently issuing additional ``List`` requests whenever the
    current response carries a ``next_page_token``.

    All the usual :class:`google.cloud.compute_v1.types.ZoneList`
    attributes are available on the pager. If multiple requests are made,
    only the most recent response is retained for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., compute.ZoneList],
            request: compute.ListZonesRequest,
            response: compute.ZoneList,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListZonesRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.ZoneList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Copy the request so page-token mutation never touches the caller's
        # object; attribute names are load-bearing (see __getattr__).
        self._method = method
        self._request = compute.ListZonesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.ZoneList]:
        # Yield the current page, then keep fetching while the server
        # reports more pages via next_page_token.
        while True:
            yield self._response
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)

    def __iter__(self) -> Iterator[compute.Zone]:
        # Flatten all pages into a single stream of Zone items.
        return (zone for page in self.pages for zone in page.items)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
+_transport_registry = OrderedDict() # type: Dict[str, Type[ZonesTransport]] +_transport_registry['rest'] = ZonesRestTransport + +__all__ = ( + 'ZonesTransport', + 'ZonesRestTransport', + 'ZonesRestInterceptor', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/transports/base.py b/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/transports/base.py new file mode 100644 index 000000000..e4b6bd97a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/services/zones/transports/base.py @@ -0,0 +1,164 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
class ZonesTransport(abc.ABC):
    """Abstract transport class for Zones.

    Resolves and stores credentials, scopes, and the target host; concrete
    subclasses (e.g. the REST transport) implement the actual RPCs.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/compute.readonly',
        'https://www.googleapis.com/auth/compute',
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'compute.googleapis.com'
    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Audience applied to default credentials
                that support ``with_gdch_audience``; falls back to ``host``
                when unset.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults. Order matters: explicit file first, then ADC fallback.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods: each abstract RPC property is
        # wrapped with retry/timeout handling and user-agent metadata once,
        # so per-call dispatch is a dict lookup.
        self._wrapped_methods = {
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def get(self) -> Callable[
            [compute.GetZoneRequest],
            Union[
                compute.Zone,
                Awaitable[compute.Zone]
            ]]:
        # Abstract: subclasses return the callable implementing Zones.Get.
        raise NotImplementedError()

    @property
    def list(self) -> Callable[
            [compute.ListZonesRequest],
            Union[
                compute.ZoneList,
                Awaitable[compute.ZoneList]
            ]]:
        # Abstract: subclasses return the callable implementing Zones.List.
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Abstract: short identifier for the transport type (e.g. "rest").
        raise NotImplementedError()
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import ZonesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ZonesRestInterceptor: + """Interceptor for Zones. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ZonesRestTransport. + + .. 
code-block:: python + class MyCustomZonesInterceptor(ZonesRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ZonesRestTransport(interceptor=MyCustomZonesInterceptor()) + client = ZonesClient(transport=transport) + + + """ + def pre_get(self, request: compute.GetZoneRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.GetZoneRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the Zones server. + """ + return request, metadata + + def post_get(self, response: compute.Zone) -> compute.Zone: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the Zones server but before + it is returned to user code. + """ + return response + def pre_list(self, request: compute.ListZonesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[compute.ListZonesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the Zones server. + """ + return request, metadata + + def post_list(self, response: compute.ZoneList) -> compute.ZoneList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the Zones server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class ZonesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ZonesRestInterceptor + + +class ZonesRestTransport(ZonesTransport): + """REST backend transport for Zones. + + The Zones API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'compute.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[ZonesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ZonesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Get(ZonesRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
    class _Get(ZonesRestStub):
        """REST stub for Zones.Get: transcodes the request to an HTTP GET."""

        def __hash__(self):
            return hash("Get")

        # Required query params with server-side defaults; empty for Get.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Fill in defaults for required query params the request left unset.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        def __call__(self,
                request: compute.GetZoneRequest, *,
                retry: OptionalRetry=gapic_v1.method.DEFAULT,
                timeout: Optional[float]=None,
                metadata: Sequence[Tuple[str, str]]=(),
                ) -> compute.Zone:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetZoneRequest):
                    The request object. A request message for Zones.Get. See
                    the method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.Zone:
                    Represents a Zone resource. A zone is
                    a deployment area. These deployment
                    areas are subsets of a region. For
                    example the zone us-east1-a is located
                    in the us-east1 region. For more
                    information, read Regions and Zones.

            """

            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/compute/v1/projects/{project}/zones/{zone}',
            },
            ]
            # Let the interceptor rewrite the request/metadata first.
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetZoneRequest.pb(request)
            # Map proto fields onto the URI template / query string.
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request['uri']
            method = transcoded_request['method']

            # Jsonify the query params
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                including_default_value_fields=False,
                use_integers_for_enums=False,
            ))
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.Zone()
            pb_resp = compute.Zone.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            # Give the interceptor a chance to post-process the response.
            resp = self._interceptor.post_get(resp)
            return resp
+ + Args: + request (~.compute.ListZonesRequest): + The request object. A request message for Zones.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.ZoneList: + Contains a list of zone resources. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/compute/v1/projects/{project}/zones', + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListZonesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ZoneList() + pb_resp = compute.ZoneList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def get(self) -> Callable[ + [compute.GetZoneRequest], + compute.Zone]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def list(self) -> Callable[ + [compute.ListZonesRequest], + compute.ZoneList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ZonesRestTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/compute_v1/types/__init__.py new file mode 100644 index 000000000..5630a2bd5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/types/__init__.py @@ -0,0 +1,2748 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .compute import ( + AbandonInstancesInstanceGroupManagerRequest, + AbandonInstancesRegionInstanceGroupManagerRequest, + AcceleratorConfig, + Accelerators, + AcceleratorType, + AcceleratorTypeAggregatedList, + AcceleratorTypeList, + AcceleratorTypesScopedList, + AccessConfig, + AddAccessConfigInstanceRequest, + AddAssociationFirewallPolicyRequest, + AddAssociationNetworkFirewallPolicyRequest, + AddAssociationRegionNetworkFirewallPolicyRequest, + AddHealthCheckTargetPoolRequest, + AddInstancesInstanceGroupRequest, + AddInstanceTargetPoolRequest, + AddNodesNodeGroupRequest, + AddPeeringNetworkRequest, + AddResourcePoliciesDiskRequest, + AddResourcePoliciesInstanceRequest, + AddResourcePoliciesRegionDiskRequest, + Address, + AddressAggregatedList, + AddressesScopedList, + AddressList, + AddRuleFirewallPolicyRequest, + AddRuleNetworkFirewallPolicyRequest, + AddRuleRegionNetworkFirewallPolicyRequest, + AddRuleSecurityPolicyRequest, + AddSignedUrlKeyBackendBucketRequest, + AddSignedUrlKeyBackendServiceRequest, + AdvancedMachineFeatures, + AggregatedListAcceleratorTypesRequest, + AggregatedListAddressesRequest, + AggregatedListAutoscalersRequest, + AggregatedListBackendServicesRequest, + AggregatedListDisksRequest, + AggregatedListDiskTypesRequest, + AggregatedListForwardingRulesRequest, + AggregatedListGlobalOperationsRequest, + AggregatedListHealthChecksRequest, + AggregatedListInstanceGroupManagersRequest, + AggregatedListInstanceGroupsRequest, + AggregatedListInstancesRequest, + AggregatedListInstanceTemplatesRequest, + AggregatedListInterconnectAttachmentsRequest, + AggregatedListMachineTypesRequest, + AggregatedListNetworkAttachmentsRequest, + AggregatedListNetworkEdgeSecurityServicesRequest, + AggregatedListNetworkEndpointGroupsRequest, + AggregatedListNodeGroupsRequest, + AggregatedListNodeTemplatesRequest, + AggregatedListNodeTypesRequest, + AggregatedListPacketMirroringsRequest, + AggregatedListPublicDelegatedPrefixesRequest, + 
AggregatedListRegionCommitmentsRequest, + AggregatedListReservationsRequest, + AggregatedListResourcePoliciesRequest, + AggregatedListRoutersRequest, + AggregatedListSecurityPoliciesRequest, + AggregatedListServiceAttachmentsRequest, + AggregatedListSslCertificatesRequest, + AggregatedListSslPoliciesRequest, + AggregatedListSubnetworksRequest, + AggregatedListTargetHttpProxiesRequest, + AggregatedListTargetHttpsProxiesRequest, + AggregatedListTargetInstancesRequest, + AggregatedListTargetPoolsRequest, + AggregatedListTargetTcpProxiesRequest, + AggregatedListTargetVpnGatewaysRequest, + AggregatedListUrlMapsRequest, + AggregatedListVpnGatewaysRequest, + AggregatedListVpnTunnelsRequest, + AliasIpRange, + AllocationResourceStatus, + AllocationResourceStatusSpecificSKUAllocation, + AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk, + AllocationSpecificSKUAllocationReservedInstanceProperties, + AllocationSpecificSKUReservation, + Allowed, + ApplyUpdatesToInstancesInstanceGroupManagerRequest, + ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, + AttachDiskInstanceRequest, + AttachedDisk, + AttachedDiskInitializeParams, + AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, + AttachNetworkEndpointsNetworkEndpointGroupRequest, + AuditConfig, + AuditLogConfig, + AuthorizationLoggingOptions, + Autoscaler, + AutoscalerAggregatedList, + AutoscalerList, + AutoscalersScopedList, + AutoscalerStatusDetails, + AutoscalingPolicy, + AutoscalingPolicyCpuUtilization, + AutoscalingPolicyCustomMetricUtilization, + AutoscalingPolicyLoadBalancingUtilization, + AutoscalingPolicyScaleInControl, + AutoscalingPolicyScalingSchedule, + Backend, + BackendBucket, + BackendBucketCdnPolicy, + BackendBucketCdnPolicyBypassCacheOnRequestHeader, + BackendBucketCdnPolicyCacheKeyPolicy, + BackendBucketCdnPolicyNegativeCachingPolicy, + BackendBucketList, + BackendService, + BackendServiceAggregatedList, + BackendServiceCdnPolicy, + 
BackendServiceCdnPolicyBypassCacheOnRequestHeader, + BackendServiceCdnPolicyNegativeCachingPolicy, + BackendServiceConnectionTrackingPolicy, + BackendServiceFailoverPolicy, + BackendServiceGroupHealth, + BackendServiceIAP, + BackendServiceList, + BackendServiceLocalityLoadBalancingPolicyConfig, + BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy, + BackendServiceLocalityLoadBalancingPolicyConfigPolicy, + BackendServiceLogConfig, + BackendServiceReference, + BackendServicesScopedList, + BfdPacket, + BfdStatus, + BfdStatusPacketCounts, + Binding, + BulkInsertDiskRequest, + BulkInsertDiskResource, + BulkInsertInstanceRequest, + BulkInsertInstanceResource, + BulkInsertInstanceResourcePerInstanceProperties, + BulkInsertRegionDiskRequest, + BulkInsertRegionInstanceRequest, + CacheInvalidationRule, + CacheKeyPolicy, + CircuitBreakers, + CloneRulesFirewallPolicyRequest, + CloneRulesNetworkFirewallPolicyRequest, + CloneRulesRegionNetworkFirewallPolicyRequest, + Commitment, + CommitmentAggregatedList, + CommitmentList, + CommitmentsScopedList, + Condition, + ConfidentialInstanceConfig, + ConnectionDraining, + ConsistentHashLoadBalancerSettings, + ConsistentHashLoadBalancerSettingsHttpCookie, + CorsPolicy, + CreateInstancesInstanceGroupManagerRequest, + CreateInstancesRegionInstanceGroupManagerRequest, + CreateSnapshotDiskRequest, + CreateSnapshotRegionDiskRequest, + CustomerEncryptionKey, + CustomerEncryptionKeyProtectedDisk, + Data, + DeleteAccessConfigInstanceRequest, + DeleteAddressRequest, + DeleteAutoscalerRequest, + DeleteBackendBucketRequest, + DeleteBackendServiceRequest, + DeleteDiskRequest, + DeleteExternalVpnGatewayRequest, + DeleteFirewallPolicyRequest, + DeleteFirewallRequest, + DeleteForwardingRuleRequest, + DeleteGlobalAddressRequest, + DeleteGlobalForwardingRuleRequest, + DeleteGlobalNetworkEndpointGroupRequest, + DeleteGlobalOperationRequest, + DeleteGlobalOperationResponse, + DeleteGlobalOrganizationOperationRequest, + 
DeleteGlobalOrganizationOperationResponse, + DeleteGlobalPublicDelegatedPrefixeRequest, + DeleteHealthCheckRequest, + DeleteImageRequest, + DeleteInstanceGroupManagerRequest, + DeleteInstanceGroupRequest, + DeleteInstanceRequest, + DeleteInstancesInstanceGroupManagerRequest, + DeleteInstancesRegionInstanceGroupManagerRequest, + DeleteInstanceTemplateRequest, + DeleteInterconnectAttachmentRequest, + DeleteInterconnectRequest, + DeleteLicenseRequest, + DeleteMachineImageRequest, + DeleteNetworkAttachmentRequest, + DeleteNetworkEdgeSecurityServiceRequest, + DeleteNetworkEndpointGroupRequest, + DeleteNetworkFirewallPolicyRequest, + DeleteNetworkRequest, + DeleteNodeGroupRequest, + DeleteNodesNodeGroupRequest, + DeleteNodeTemplateRequest, + DeletePacketMirroringRequest, + DeletePerInstanceConfigsInstanceGroupManagerRequest, + DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, + DeletePublicAdvertisedPrefixeRequest, + DeletePublicDelegatedPrefixeRequest, + DeleteRegionAutoscalerRequest, + DeleteRegionBackendServiceRequest, + DeleteRegionDiskRequest, + DeleteRegionHealthCheckRequest, + DeleteRegionHealthCheckServiceRequest, + DeleteRegionInstanceGroupManagerRequest, + DeleteRegionInstanceTemplateRequest, + DeleteRegionNetworkEndpointGroupRequest, + DeleteRegionNetworkFirewallPolicyRequest, + DeleteRegionNotificationEndpointRequest, + DeleteRegionOperationRequest, + DeleteRegionOperationResponse, + DeleteRegionSecurityPolicyRequest, + DeleteRegionSslCertificateRequest, + DeleteRegionSslPolicyRequest, + DeleteRegionTargetHttpProxyRequest, + DeleteRegionTargetHttpsProxyRequest, + DeleteRegionTargetTcpProxyRequest, + DeleteRegionUrlMapRequest, + DeleteReservationRequest, + DeleteResourcePolicyRequest, + DeleteRouteRequest, + DeleteRouterRequest, + DeleteSecurityPolicyRequest, + DeleteServiceAttachmentRequest, + DeleteSignedUrlKeyBackendBucketRequest, + DeleteSignedUrlKeyBackendServiceRequest, + DeleteSnapshotRequest, + DeleteSslCertificateRequest, + 
DeleteSslPolicyRequest, + DeleteSubnetworkRequest, + DeleteTargetGrpcProxyRequest, + DeleteTargetHttpProxyRequest, + DeleteTargetHttpsProxyRequest, + DeleteTargetInstanceRequest, + DeleteTargetPoolRequest, + DeleteTargetSslProxyRequest, + DeleteTargetTcpProxyRequest, + DeleteTargetVpnGatewayRequest, + DeleteUrlMapRequest, + DeleteVpnGatewayRequest, + DeleteVpnTunnelRequest, + DeleteZoneOperationRequest, + DeleteZoneOperationResponse, + Denied, + DeprecateImageRequest, + DeprecationStatus, + DetachDiskInstanceRequest, + DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, + DetachNetworkEndpointsNetworkEndpointGroupRequest, + DisableXpnHostProjectRequest, + DisableXpnResourceProjectRequest, + Disk, + DiskAggregatedList, + DiskAsyncReplication, + DiskAsyncReplicationList, + DiskInstantiationConfig, + DiskList, + DiskMoveRequest, + DiskParams, + DiskResourceStatus, + DiskResourceStatusAsyncReplicationStatus, + DisksAddResourcePoliciesRequest, + DisksRemoveResourcePoliciesRequest, + DisksResizeRequest, + DisksScopedList, + DisksStartAsyncReplicationRequest, + DisksStopGroupAsyncReplicationResource, + DiskType, + DiskTypeAggregatedList, + DiskTypeList, + DiskTypesScopedList, + DisplayDevice, + DistributionPolicy, + DistributionPolicyZoneConfiguration, + Duration, + EnableXpnHostProjectRequest, + EnableXpnResourceProjectRequest, + Error, + ErrorDetails, + ErrorInfo, + Errors, + ExchangedPeeringRoute, + ExchangedPeeringRoutesList, + ExpandIpCidrRangeSubnetworkRequest, + Expr, + ExternalVpnGateway, + ExternalVpnGatewayInterface, + ExternalVpnGatewayList, + FileContentBuffer, + Firewall, + FirewallList, + FirewallLogConfig, + FirewallPoliciesListAssociationsResponse, + FirewallPolicy, + FirewallPolicyAssociation, + FirewallPolicyList, + FirewallPolicyRule, + FirewallPolicyRuleMatcher, + FirewallPolicyRuleMatcherLayer4Config, + FirewallPolicyRuleSecureTag, + FixedOrPercent, + ForwardingRule, + ForwardingRuleAggregatedList, + ForwardingRuleList, + ForwardingRuleReference, 
+ ForwardingRuleServiceDirectoryRegistration, + ForwardingRulesScopedList, + GetAcceleratorTypeRequest, + GetAddressRequest, + GetAssociationFirewallPolicyRequest, + GetAssociationNetworkFirewallPolicyRequest, + GetAssociationRegionNetworkFirewallPolicyRequest, + GetAutoscalerRequest, + GetBackendBucketRequest, + GetBackendServiceRequest, + GetDiagnosticsInterconnectRequest, + GetDiskRequest, + GetDiskTypeRequest, + GetEffectiveFirewallsInstanceRequest, + GetEffectiveFirewallsNetworkRequest, + GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, + GetExternalVpnGatewayRequest, + GetFirewallPolicyRequest, + GetFirewallRequest, + GetForwardingRuleRequest, + GetFromFamilyImageRequest, + GetGlobalAddressRequest, + GetGlobalForwardingRuleRequest, + GetGlobalNetworkEndpointGroupRequest, + GetGlobalOperationRequest, + GetGlobalOrganizationOperationRequest, + GetGlobalPublicDelegatedPrefixeRequest, + GetGuestAttributesInstanceRequest, + GetHealthBackendServiceRequest, + GetHealthCheckRequest, + GetHealthRegionBackendServiceRequest, + GetHealthTargetPoolRequest, + GetIamPolicyBackendServiceRequest, + GetIamPolicyDiskRequest, + GetIamPolicyFirewallPolicyRequest, + GetIamPolicyImageRequest, + GetIamPolicyInstanceRequest, + GetIamPolicyInstanceTemplateRequest, + GetIamPolicyLicenseRequest, + GetIamPolicyMachineImageRequest, + GetIamPolicyNetworkAttachmentRequest, + GetIamPolicyNetworkFirewallPolicyRequest, + GetIamPolicyNodeGroupRequest, + GetIamPolicyNodeTemplateRequest, + GetIamPolicyRegionBackendServiceRequest, + GetIamPolicyRegionDiskRequest, + GetIamPolicyRegionNetworkFirewallPolicyRequest, + GetIamPolicyReservationRequest, + GetIamPolicyResourcePolicyRequest, + GetIamPolicyServiceAttachmentRequest, + GetIamPolicySnapshotRequest, + GetIamPolicySubnetworkRequest, + GetImageFamilyViewRequest, + GetImageRequest, + GetInstanceGroupManagerRequest, + GetInstanceGroupRequest, + GetInstanceRequest, + GetInstanceTemplateRequest, + GetInterconnectAttachmentRequest, + 
GetInterconnectLocationRequest, + GetInterconnectRemoteLocationRequest, + GetInterconnectRequest, + GetLicenseCodeRequest, + GetLicenseRequest, + GetMachineImageRequest, + GetMachineTypeRequest, + GetNatMappingInfoRoutersRequest, + GetNetworkAttachmentRequest, + GetNetworkEdgeSecurityServiceRequest, + GetNetworkEndpointGroupRequest, + GetNetworkFirewallPolicyRequest, + GetNetworkRequest, + GetNodeGroupRequest, + GetNodeTemplateRequest, + GetNodeTypeRequest, + GetPacketMirroringRequest, + GetProjectRequest, + GetPublicAdvertisedPrefixeRequest, + GetPublicDelegatedPrefixeRequest, + GetRegionAutoscalerRequest, + GetRegionBackendServiceRequest, + GetRegionCommitmentRequest, + GetRegionDiskRequest, + GetRegionDiskTypeRequest, + GetRegionHealthCheckRequest, + GetRegionHealthCheckServiceRequest, + GetRegionInstanceGroupManagerRequest, + GetRegionInstanceGroupRequest, + GetRegionInstanceTemplateRequest, + GetRegionNetworkEndpointGroupRequest, + GetRegionNetworkFirewallPolicyRequest, + GetRegionNotificationEndpointRequest, + GetRegionOperationRequest, + GetRegionRequest, + GetRegionSecurityPolicyRequest, + GetRegionSslCertificateRequest, + GetRegionSslPolicyRequest, + GetRegionTargetHttpProxyRequest, + GetRegionTargetHttpsProxyRequest, + GetRegionTargetTcpProxyRequest, + GetRegionUrlMapRequest, + GetReservationRequest, + GetResourcePolicyRequest, + GetRouteRequest, + GetRouterRequest, + GetRouterStatusRouterRequest, + GetRuleFirewallPolicyRequest, + GetRuleNetworkFirewallPolicyRequest, + GetRuleRegionNetworkFirewallPolicyRequest, + GetRuleSecurityPolicyRequest, + GetScreenshotInstanceRequest, + GetSecurityPolicyRequest, + GetSerialPortOutputInstanceRequest, + GetServiceAttachmentRequest, + GetShieldedInstanceIdentityInstanceRequest, + GetSnapshotRequest, + GetSslCertificateRequest, + GetSslPolicyRequest, + GetStatusVpnGatewayRequest, + GetSubnetworkRequest, + GetTargetGrpcProxyRequest, + GetTargetHttpProxyRequest, + GetTargetHttpsProxyRequest, + GetTargetInstanceRequest, + 
GetTargetPoolRequest, + GetTargetSslProxyRequest, + GetTargetTcpProxyRequest, + GetTargetVpnGatewayRequest, + GetUrlMapRequest, + GetVpnGatewayRequest, + GetVpnTunnelRequest, + GetXpnHostProjectRequest, + GetXpnResourcesProjectsRequest, + GetZoneOperationRequest, + GetZoneRequest, + GlobalAddressesMoveRequest, + GlobalNetworkEndpointGroupsAttachEndpointsRequest, + GlobalNetworkEndpointGroupsDetachEndpointsRequest, + GlobalOrganizationSetPolicyRequest, + GlobalSetLabelsRequest, + GlobalSetPolicyRequest, + GRPCHealthCheck, + GuestAttributes, + GuestAttributesEntry, + GuestAttributesValue, + GuestOsFeature, + HealthCheck, + HealthCheckList, + HealthCheckLogConfig, + HealthCheckReference, + HealthChecksAggregatedList, + HealthCheckService, + HealthCheckServiceReference, + HealthCheckServicesList, + HealthChecksScopedList, + HealthStatus, + HealthStatusForNetworkEndpoint, + Help, + HelpLink, + HostRule, + HTTP2HealthCheck, + HttpFaultAbort, + HttpFaultDelay, + HttpFaultInjection, + HttpHeaderAction, + HttpHeaderMatch, + HttpHeaderOption, + HTTPHealthCheck, + HttpQueryParameterMatch, + HttpRedirectAction, + HttpRetryPolicy, + HttpRouteAction, + HttpRouteRule, + HttpRouteRuleMatch, + HTTPSHealthCheck, + Image, + ImageFamilyView, + ImageList, + InitialStateConfig, + InsertAddressRequest, + InsertAutoscalerRequest, + InsertBackendBucketRequest, + InsertBackendServiceRequest, + InsertDiskRequest, + InsertExternalVpnGatewayRequest, + InsertFirewallPolicyRequest, + InsertFirewallRequest, + InsertForwardingRuleRequest, + InsertGlobalAddressRequest, + InsertGlobalForwardingRuleRequest, + InsertGlobalNetworkEndpointGroupRequest, + InsertGlobalPublicDelegatedPrefixeRequest, + InsertHealthCheckRequest, + InsertImageRequest, + InsertInstanceGroupManagerRequest, + InsertInstanceGroupRequest, + InsertInstanceRequest, + InsertInstanceTemplateRequest, + InsertInterconnectAttachmentRequest, + InsertInterconnectRequest, + InsertLicenseRequest, + InsertMachineImageRequest, + 
InsertNetworkAttachmentRequest, + InsertNetworkEdgeSecurityServiceRequest, + InsertNetworkEndpointGroupRequest, + InsertNetworkFirewallPolicyRequest, + InsertNetworkRequest, + InsertNodeGroupRequest, + InsertNodeTemplateRequest, + InsertPacketMirroringRequest, + InsertPublicAdvertisedPrefixeRequest, + InsertPublicDelegatedPrefixeRequest, + InsertRegionAutoscalerRequest, + InsertRegionBackendServiceRequest, + InsertRegionCommitmentRequest, + InsertRegionDiskRequest, + InsertRegionHealthCheckRequest, + InsertRegionHealthCheckServiceRequest, + InsertRegionInstanceGroupManagerRequest, + InsertRegionInstanceTemplateRequest, + InsertRegionNetworkEndpointGroupRequest, + InsertRegionNetworkFirewallPolicyRequest, + InsertRegionNotificationEndpointRequest, + InsertRegionSecurityPolicyRequest, + InsertRegionSslCertificateRequest, + InsertRegionSslPolicyRequest, + InsertRegionTargetHttpProxyRequest, + InsertRegionTargetHttpsProxyRequest, + InsertRegionTargetTcpProxyRequest, + InsertRegionUrlMapRequest, + InsertReservationRequest, + InsertResourcePolicyRequest, + InsertRouteRequest, + InsertRouterRequest, + InsertSecurityPolicyRequest, + InsertServiceAttachmentRequest, + InsertSnapshotRequest, + InsertSslCertificateRequest, + InsertSslPolicyRequest, + InsertSubnetworkRequest, + InsertTargetGrpcProxyRequest, + InsertTargetHttpProxyRequest, + InsertTargetHttpsProxyRequest, + InsertTargetInstanceRequest, + InsertTargetPoolRequest, + InsertTargetSslProxyRequest, + InsertTargetTcpProxyRequest, + InsertTargetVpnGatewayRequest, + InsertUrlMapRequest, + InsertVpnGatewayRequest, + InsertVpnTunnelRequest, + Instance, + InstanceAggregatedList, + InstanceConsumptionData, + InstanceConsumptionInfo, + InstanceGroup, + InstanceGroupAggregatedList, + InstanceGroupList, + InstanceGroupManager, + InstanceGroupManagerActionsSummary, + InstanceGroupManagerAggregatedList, + InstanceGroupManagerAutoHealingPolicy, + InstanceGroupManagerInstanceLifecyclePolicy, + InstanceGroupManagerList, + 
InstanceGroupManagersAbandonInstancesRequest, + InstanceGroupManagersApplyUpdatesRequest, + InstanceGroupManagersCreateInstancesRequest, + InstanceGroupManagersDeleteInstancesRequest, + InstanceGroupManagersDeletePerInstanceConfigsReq, + InstanceGroupManagersListErrorsResponse, + InstanceGroupManagersListManagedInstancesResponse, + InstanceGroupManagersListPerInstanceConfigsResp, + InstanceGroupManagersPatchPerInstanceConfigsReq, + InstanceGroupManagersRecreateInstancesRequest, + InstanceGroupManagersScopedList, + InstanceGroupManagersSetInstanceTemplateRequest, + InstanceGroupManagersSetTargetPoolsRequest, + InstanceGroupManagerStatus, + InstanceGroupManagerStatusStateful, + InstanceGroupManagerStatusStatefulPerInstanceConfigs, + InstanceGroupManagerStatusVersionTarget, + InstanceGroupManagersUpdatePerInstanceConfigsReq, + InstanceGroupManagerUpdatePolicy, + InstanceGroupManagerVersion, + InstanceGroupsAddInstancesRequest, + InstanceGroupsListInstances, + InstanceGroupsListInstancesRequest, + InstanceGroupsRemoveInstancesRequest, + InstanceGroupsScopedList, + InstanceGroupsSetNamedPortsRequest, + InstanceList, + InstanceListReferrers, + InstanceManagedByIgmError, + InstanceManagedByIgmErrorInstanceActionDetails, + InstanceManagedByIgmErrorManagedInstanceError, + InstanceMoveRequest, + InstanceParams, + InstanceProperties, + InstanceReference, + InstancesAddResourcePoliciesRequest, + InstancesGetEffectiveFirewallsResponse, + InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, + InstancesRemoveResourcePoliciesRequest, + InstancesScopedList, + InstancesSetLabelsRequest, + InstancesSetMachineResourcesRequest, + InstancesSetMachineTypeRequest, + InstancesSetMinCpuPlatformRequest, + InstancesSetNameRequest, + InstancesSetServiceAccountRequest, + InstancesStartWithEncryptionKeyRequest, + InstanceTemplate, + InstanceTemplateAggregatedList, + InstanceTemplateList, + InstanceTemplatesScopedList, + InstanceWithNamedPorts, + Int64RangeMatch, + Interconnect, + 
InterconnectAttachment, + InterconnectAttachmentAggregatedList, + InterconnectAttachmentConfigurationConstraints, + InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange, + InterconnectAttachmentList, + InterconnectAttachmentPartnerMetadata, + InterconnectAttachmentPrivateInfo, + InterconnectAttachmentsScopedList, + InterconnectCircuitInfo, + InterconnectDiagnostics, + InterconnectDiagnosticsARPEntry, + InterconnectDiagnosticsLinkLACPStatus, + InterconnectDiagnosticsLinkOpticalPower, + InterconnectDiagnosticsLinkStatus, + InterconnectList, + InterconnectLocation, + InterconnectLocationList, + InterconnectLocationRegionInfo, + InterconnectOutageNotification, + InterconnectRemoteLocation, + InterconnectRemoteLocationConstraints, + InterconnectRemoteLocationConstraintsSubnetLengthRange, + InterconnectRemoteLocationList, + InterconnectRemoteLocationPermittedConnections, + InterconnectsGetDiagnosticsResponse, + InvalidateCacheUrlMapRequest, + Items, + License, + LicenseCode, + LicenseCodeLicenseAlias, + LicenseResourceCommitment, + LicenseResourceRequirements, + LicensesListResponse, + ListAcceleratorTypesRequest, + ListAddressesRequest, + ListAssociationsFirewallPolicyRequest, + ListAutoscalersRequest, + ListAvailableFeaturesRegionSslPoliciesRequest, + ListAvailableFeaturesSslPoliciesRequest, + ListBackendBucketsRequest, + ListBackendServicesRequest, + ListDisksRequest, + ListDiskTypesRequest, + ListErrorsInstanceGroupManagersRequest, + ListErrorsRegionInstanceGroupManagersRequest, + ListExternalVpnGatewaysRequest, + ListFirewallPoliciesRequest, + ListFirewallsRequest, + ListForwardingRulesRequest, + ListGlobalAddressesRequest, + ListGlobalForwardingRulesRequest, + ListGlobalNetworkEndpointGroupsRequest, + ListGlobalOperationsRequest, + ListGlobalOrganizationOperationsRequest, + ListGlobalPublicDelegatedPrefixesRequest, + ListHealthChecksRequest, + ListImagesRequest, + ListInstanceGroupManagersRequest, + ListInstanceGroupsRequest, + 
ListInstancesInstanceGroupsRequest, + ListInstancesRegionInstanceGroupsRequest, + ListInstancesRequest, + ListInstanceTemplatesRequest, + ListInterconnectAttachmentsRequest, + ListInterconnectLocationsRequest, + ListInterconnectRemoteLocationsRequest, + ListInterconnectsRequest, + ListLicensesRequest, + ListMachineImagesRequest, + ListMachineTypesRequest, + ListManagedInstancesInstanceGroupManagersRequest, + ListManagedInstancesRegionInstanceGroupManagersRequest, + ListNetworkAttachmentsRequest, + ListNetworkEndpointGroupsRequest, + ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, + ListNetworkEndpointsNetworkEndpointGroupsRequest, + ListNetworkFirewallPoliciesRequest, + ListNetworksRequest, + ListNodeGroupsRequest, + ListNodesNodeGroupsRequest, + ListNodeTemplatesRequest, + ListNodeTypesRequest, + ListPacketMirroringsRequest, + ListPeeringRoutesNetworksRequest, + ListPerInstanceConfigsInstanceGroupManagersRequest, + ListPerInstanceConfigsRegionInstanceGroupManagersRequest, + ListPreconfiguredExpressionSetsSecurityPoliciesRequest, + ListPublicAdvertisedPrefixesRequest, + ListPublicDelegatedPrefixesRequest, + ListReferrersInstancesRequest, + ListRegionAutoscalersRequest, + ListRegionBackendServicesRequest, + ListRegionCommitmentsRequest, + ListRegionDisksRequest, + ListRegionDiskTypesRequest, + ListRegionHealthCheckServicesRequest, + ListRegionHealthChecksRequest, + ListRegionInstanceGroupManagersRequest, + ListRegionInstanceGroupsRequest, + ListRegionInstanceTemplatesRequest, + ListRegionNetworkEndpointGroupsRequest, + ListRegionNetworkFirewallPoliciesRequest, + ListRegionNotificationEndpointsRequest, + ListRegionOperationsRequest, + ListRegionSecurityPoliciesRequest, + ListRegionsRequest, + ListRegionSslCertificatesRequest, + ListRegionSslPoliciesRequest, + ListRegionTargetHttpProxiesRequest, + ListRegionTargetHttpsProxiesRequest, + ListRegionTargetTcpProxiesRequest, + ListRegionUrlMapsRequest, + ListReservationsRequest, + ListResourcePoliciesRequest, + 
ListRoutersRequest, + ListRoutesRequest, + ListSecurityPoliciesRequest, + ListServiceAttachmentsRequest, + ListSnapshotsRequest, + ListSslCertificatesRequest, + ListSslPoliciesRequest, + ListSubnetworksRequest, + ListTargetGrpcProxiesRequest, + ListTargetHttpProxiesRequest, + ListTargetHttpsProxiesRequest, + ListTargetInstancesRequest, + ListTargetPoolsRequest, + ListTargetSslProxiesRequest, + ListTargetTcpProxiesRequest, + ListTargetVpnGatewaysRequest, + ListUrlMapsRequest, + ListUsableSubnetworksRequest, + ListVpnGatewaysRequest, + ListVpnTunnelsRequest, + ListXpnHostsProjectsRequest, + ListZoneOperationsRequest, + ListZonesRequest, + LocalDisk, + LocalizedMessage, + LocationPolicy, + LocationPolicyLocation, + LocationPolicyLocationConstraints, + LogConfig, + LogConfigCloudAuditOptions, + LogConfigCounterOptions, + LogConfigCounterOptionsCustomField, + LogConfigDataAccessOptions, + MachineImage, + MachineImageList, + MachineType, + MachineTypeAggregatedList, + MachineTypeList, + MachineTypesScopedList, + ManagedInstance, + ManagedInstanceInstanceHealth, + ManagedInstanceLastAttempt, + ManagedInstanceVersion, + Metadata, + MetadataFilter, + MetadataFilterLabelMatch, + MoveAddressRequest, + MoveDiskProjectRequest, + MoveFirewallPolicyRequest, + MoveGlobalAddressRequest, + MoveInstanceProjectRequest, + NamedPort, + Network, + NetworkAttachment, + NetworkAttachmentAggregatedList, + NetworkAttachmentConnectedEndpoint, + NetworkAttachmentList, + NetworkAttachmentsScopedList, + NetworkEdgeSecurityService, + NetworkEdgeSecurityServiceAggregatedList, + NetworkEdgeSecurityServicesScopedList, + NetworkEndpoint, + NetworkEndpointGroup, + NetworkEndpointGroupAggregatedList, + NetworkEndpointGroupAppEngine, + NetworkEndpointGroupCloudFunction, + NetworkEndpointGroupCloudRun, + NetworkEndpointGroupList, + NetworkEndpointGroupPscData, + NetworkEndpointGroupsAttachEndpointsRequest, + NetworkEndpointGroupsDetachEndpointsRequest, + NetworkEndpointGroupsListEndpointsRequest, + 
NetworkEndpointGroupsListNetworkEndpoints, + NetworkEndpointGroupsScopedList, + NetworkEndpointWithHealthStatus, + NetworkInterface, + NetworkList, + NetworkPeering, + NetworkPerformanceConfig, + NetworkRoutingConfig, + NetworksAddPeeringRequest, + NetworksGetEffectiveFirewallsResponse, + NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy, + NetworksRemovePeeringRequest, + NetworksUpdatePeeringRequest, + NodeGroup, + NodeGroupAggregatedList, + NodeGroupAutoscalingPolicy, + NodeGroupList, + NodeGroupMaintenanceWindow, + NodeGroupNode, + NodeGroupsAddNodesRequest, + NodeGroupsDeleteNodesRequest, + NodeGroupsListNodes, + NodeGroupsScopedList, + NodeGroupsSetNodeTemplateRequest, + NodeGroupsSimulateMaintenanceEventRequest, + NodeTemplate, + NodeTemplateAggregatedList, + NodeTemplateList, + NodeTemplateNodeTypeFlexibility, + NodeTemplatesScopedList, + NodeType, + NodeTypeAggregatedList, + NodeTypeList, + NodeTypesScopedList, + NotificationEndpoint, + NotificationEndpointGrpcSettings, + NotificationEndpointList, + Operation, + OperationAggregatedList, + OperationList, + OperationsScopedList, + OutlierDetection, + PacketIntervals, + PacketMirroring, + PacketMirroringAggregatedList, + PacketMirroringFilter, + PacketMirroringForwardingRuleInfo, + PacketMirroringList, + PacketMirroringMirroredResourceInfo, + PacketMirroringMirroredResourceInfoInstanceInfo, + PacketMirroringMirroredResourceInfoSubnetInfo, + PacketMirroringNetworkInfo, + PacketMirroringsScopedList, + PatchAutoscalerRequest, + PatchBackendBucketRequest, + PatchBackendServiceRequest, + PatchFirewallPolicyRequest, + PatchFirewallRequest, + PatchForwardingRuleRequest, + PatchGlobalForwardingRuleRequest, + PatchGlobalPublicDelegatedPrefixeRequest, + PatchHealthCheckRequest, + PatchImageRequest, + PatchInstanceGroupManagerRequest, + PatchInterconnectAttachmentRequest, + PatchInterconnectRequest, + PatchNetworkEdgeSecurityServiceRequest, + PatchNetworkFirewallPolicyRequest, + PatchNetworkRequest, + 
PatchNodeGroupRequest, + PatchPacketMirroringRequest, + PatchPerInstanceConfigsInstanceGroupManagerRequest, + PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, + PatchPublicAdvertisedPrefixeRequest, + PatchPublicDelegatedPrefixeRequest, + PatchRegionAutoscalerRequest, + PatchRegionBackendServiceRequest, + PatchRegionHealthCheckRequest, + PatchRegionHealthCheckServiceRequest, + PatchRegionInstanceGroupManagerRequest, + PatchRegionNetworkFirewallPolicyRequest, + PatchRegionSecurityPolicyRequest, + PatchRegionSslPolicyRequest, + PatchRegionTargetHttpsProxyRequest, + PatchRegionUrlMapRequest, + PatchResourcePolicyRequest, + PatchRouterRequest, + PatchRuleFirewallPolicyRequest, + PatchRuleNetworkFirewallPolicyRequest, + PatchRuleRegionNetworkFirewallPolicyRequest, + PatchRuleSecurityPolicyRequest, + PatchSecurityPolicyRequest, + PatchServiceAttachmentRequest, + PatchSslPolicyRequest, + PatchSubnetworkRequest, + PatchTargetGrpcProxyRequest, + PatchTargetHttpProxyRequest, + PatchTargetHttpsProxyRequest, + PatchUrlMapRequest, + PathMatcher, + PathRule, + PerInstanceConfig, + Policy, + PreconfiguredWafSet, + PreservedState, + PreservedStatePreservedDisk, + PreviewRouterRequest, + Project, + ProjectsDisableXpnResourceRequest, + ProjectsEnableXpnResourceRequest, + ProjectsGetXpnResources, + ProjectsListXpnHostsRequest, + ProjectsSetDefaultNetworkTierRequest, + PublicAdvertisedPrefix, + PublicAdvertisedPrefixList, + PublicAdvertisedPrefixPublicDelegatedPrefix, + PublicDelegatedPrefix, + PublicDelegatedPrefixAggregatedList, + PublicDelegatedPrefixesScopedList, + PublicDelegatedPrefixList, + PublicDelegatedPrefixPublicDelegatedSubPrefix, + Quota, + QuotaExceededInfo, + RawDisk, + RecreateInstancesInstanceGroupManagerRequest, + RecreateInstancesRegionInstanceGroupManagerRequest, + Reference, + Region, + RegionAddressesMoveRequest, + RegionAutoscalerList, + RegionDisksAddResourcePoliciesRequest, + RegionDisksRemoveResourcePoliciesRequest, + RegionDisksResizeRequest, + 
RegionDisksStartAsyncReplicationRequest, + RegionDiskTypeList, + RegionInstanceGroupList, + RegionInstanceGroupManagerDeleteInstanceConfigReq, + RegionInstanceGroupManagerList, + RegionInstanceGroupManagerPatchInstanceConfigReq, + RegionInstanceGroupManagersAbandonInstancesRequest, + RegionInstanceGroupManagersApplyUpdatesRequest, + RegionInstanceGroupManagersCreateInstancesRequest, + RegionInstanceGroupManagersDeleteInstancesRequest, + RegionInstanceGroupManagersListErrorsResponse, + RegionInstanceGroupManagersListInstanceConfigsResp, + RegionInstanceGroupManagersListInstancesResponse, + RegionInstanceGroupManagersRecreateRequest, + RegionInstanceGroupManagersSetTargetPoolsRequest, + RegionInstanceGroupManagersSetTemplateRequest, + RegionInstanceGroupManagerUpdateInstanceConfigReq, + RegionInstanceGroupsListInstances, + RegionInstanceGroupsListInstancesRequest, + RegionInstanceGroupsSetNamedPortsRequest, + RegionList, + RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse, + RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, + RegionSetLabelsRequest, + RegionSetPolicyRequest, + RegionTargetHttpsProxiesSetSslCertificatesRequest, + RegionUrlMapsValidateRequest, + RemoveAssociationFirewallPolicyRequest, + RemoveAssociationNetworkFirewallPolicyRequest, + RemoveAssociationRegionNetworkFirewallPolicyRequest, + RemoveHealthCheckTargetPoolRequest, + RemoveInstancesInstanceGroupRequest, + RemoveInstanceTargetPoolRequest, + RemovePeeringNetworkRequest, + RemoveResourcePoliciesDiskRequest, + RemoveResourcePoliciesInstanceRequest, + RemoveResourcePoliciesRegionDiskRequest, + RemoveRuleFirewallPolicyRequest, + RemoveRuleNetworkFirewallPolicyRequest, + RemoveRuleRegionNetworkFirewallPolicyRequest, + RemoveRuleSecurityPolicyRequest, + RequestMirrorPolicy, + Reservation, + ReservationAffinity, + ReservationAggregatedList, + ReservationList, + ReservationsResizeRequest, + ReservationsScopedList, + ResetInstanceRequest, + ResizeDiskRequest, + 
ResizeInstanceGroupManagerRequest, + ResizeRegionDiskRequest, + ResizeRegionInstanceGroupManagerRequest, + ResizeReservationRequest, + ResourceCommitment, + ResourceGroupReference, + ResourcePoliciesScopedList, + ResourcePolicy, + ResourcePolicyAggregatedList, + ResourcePolicyDailyCycle, + ResourcePolicyDiskConsistencyGroupPolicy, + ResourcePolicyGroupPlacementPolicy, + ResourcePolicyHourlyCycle, + ResourcePolicyInstanceSchedulePolicy, + ResourcePolicyInstanceSchedulePolicySchedule, + ResourcePolicyList, + ResourcePolicyResourceStatus, + ResourcePolicyResourceStatusInstanceSchedulePolicyStatus, + ResourcePolicySnapshotSchedulePolicy, + ResourcePolicySnapshotSchedulePolicyRetentionPolicy, + ResourcePolicySnapshotSchedulePolicySchedule, + ResourcePolicySnapshotSchedulePolicySnapshotProperties, + ResourcePolicyWeeklyCycle, + ResourcePolicyWeeklyCycleDayOfWeek, + ResourceStatus, + ResumeInstanceRequest, + Route, + RouteAsPath, + RouteList, + Router, + RouterAdvertisedIpRange, + RouterAggregatedList, + RouterBgp, + RouterBgpPeer, + RouterBgpPeerBfd, + RouterBgpPeerCustomLearnedIpRange, + RouterInterface, + RouterList, + RouterMd5AuthenticationKey, + RouterNat, + RouterNatLogConfig, + RouterNatRule, + RouterNatRuleAction, + RouterNatSubnetworkToNat, + RoutersPreviewResponse, + RoutersScopedList, + RouterStatus, + RouterStatusBgpPeerStatus, + RouterStatusNatStatus, + RouterStatusNatStatusNatRuleStatus, + RouterStatusResponse, + Rule, + SavedAttachedDisk, + SavedDisk, + ScalingScheduleStatus, + Scheduling, + SchedulingNodeAffinity, + ScratchDisks, + Screenshot, + SecurityPoliciesAggregatedList, + SecurityPoliciesListPreconfiguredExpressionSetsResponse, + SecurityPoliciesScopedList, + SecurityPoliciesWafConfig, + SecurityPolicy, + SecurityPolicyAdaptiveProtectionConfig, + SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig, + SecurityPolicyAdvancedOptionsConfig, + SecurityPolicyAdvancedOptionsConfigJsonCustomConfig, + SecurityPolicyDdosProtectionConfig, + 
SecurityPolicyList, + SecurityPolicyRecaptchaOptionsConfig, + SecurityPolicyReference, + SecurityPolicyRule, + SecurityPolicyRuleHttpHeaderAction, + SecurityPolicyRuleHttpHeaderActionHttpHeaderOption, + SecurityPolicyRuleMatcher, + SecurityPolicyRuleMatcherConfig, + SecurityPolicyRulePreconfiguredWafConfig, + SecurityPolicyRulePreconfiguredWafConfigExclusion, + SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams, + SecurityPolicyRuleRateLimitOptions, + SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig, + SecurityPolicyRuleRateLimitOptionsThreshold, + SecurityPolicyRuleRedirectOptions, + SecuritySettings, + SendDiagnosticInterruptInstanceRequest, + SendDiagnosticInterruptInstanceResponse, + SerialPortOutput, + ServerBinding, + ServiceAccount, + ServiceAttachment, + ServiceAttachmentAggregatedList, + ServiceAttachmentConnectedEndpoint, + ServiceAttachmentConsumerProjectLimit, + ServiceAttachmentList, + ServiceAttachmentsScopedList, + SetBackendServiceTargetSslProxyRequest, + SetBackendServiceTargetTcpProxyRequest, + SetBackupTargetPoolRequest, + SetCertificateMapTargetHttpsProxyRequest, + SetCertificateMapTargetSslProxyRequest, + SetCommonInstanceMetadataProjectRequest, + SetDefaultNetworkTierProjectRequest, + SetDeletionProtectionInstanceRequest, + SetDiskAutoDeleteInstanceRequest, + SetEdgeSecurityPolicyBackendBucketRequest, + SetEdgeSecurityPolicyBackendServiceRequest, + SetIamPolicyBackendServiceRequest, + SetIamPolicyDiskRequest, + SetIamPolicyFirewallPolicyRequest, + SetIamPolicyImageRequest, + SetIamPolicyInstanceRequest, + SetIamPolicyInstanceTemplateRequest, + SetIamPolicyLicenseRequest, + SetIamPolicyMachineImageRequest, + SetIamPolicyNetworkAttachmentRequest, + SetIamPolicyNetworkFirewallPolicyRequest, + SetIamPolicyNodeGroupRequest, + SetIamPolicyNodeTemplateRequest, + SetIamPolicyRegionBackendServiceRequest, + SetIamPolicyRegionDiskRequest, + SetIamPolicyRegionNetworkFirewallPolicyRequest, + SetIamPolicyReservationRequest, + 
SetIamPolicyResourcePolicyRequest, + SetIamPolicyServiceAttachmentRequest, + SetIamPolicySnapshotRequest, + SetIamPolicySubnetworkRequest, + SetInstanceTemplateInstanceGroupManagerRequest, + SetInstanceTemplateRegionInstanceGroupManagerRequest, + SetLabelsAddressRequest, + SetLabelsDiskRequest, + SetLabelsExternalVpnGatewayRequest, + SetLabelsForwardingRuleRequest, + SetLabelsGlobalAddressRequest, + SetLabelsGlobalForwardingRuleRequest, + SetLabelsImageRequest, + SetLabelsInstanceRequest, + SetLabelsInterconnectAttachmentRequest, + SetLabelsInterconnectRequest, + SetLabelsRegionDiskRequest, + SetLabelsSecurityPolicyRequest, + SetLabelsSnapshotRequest, + SetLabelsTargetVpnGatewayRequest, + SetLabelsVpnGatewayRequest, + SetLabelsVpnTunnelRequest, + SetMachineResourcesInstanceRequest, + SetMachineTypeInstanceRequest, + SetMetadataInstanceRequest, + SetMinCpuPlatformInstanceRequest, + SetNamedPortsInstanceGroupRequest, + SetNamedPortsRegionInstanceGroupRequest, + SetNameInstanceRequest, + SetNodeTemplateNodeGroupRequest, + SetPrivateIpGoogleAccessSubnetworkRequest, + SetProxyHeaderTargetSslProxyRequest, + SetProxyHeaderTargetTcpProxyRequest, + SetQuicOverrideTargetHttpsProxyRequest, + SetSchedulingInstanceRequest, + SetSecurityPolicyBackendServiceRequest, + SetServiceAccountInstanceRequest, + SetShieldedInstanceIntegrityPolicyInstanceRequest, + SetSslCertificatesRegionTargetHttpsProxyRequest, + SetSslCertificatesTargetHttpsProxyRequest, + SetSslCertificatesTargetSslProxyRequest, + SetSslPolicyTargetHttpsProxyRequest, + SetSslPolicyTargetSslProxyRequest, + SetTagsInstanceRequest, + SetTargetForwardingRuleRequest, + SetTargetGlobalForwardingRuleRequest, + SetTargetPoolsInstanceGroupManagerRequest, + SetTargetPoolsRegionInstanceGroupManagerRequest, + SetUrlMapRegionTargetHttpProxyRequest, + SetUrlMapRegionTargetHttpsProxyRequest, + SetUrlMapTargetHttpProxyRequest, + SetUrlMapTargetHttpsProxyRequest, + SetUsageExportBucketProjectRequest, + ShareSettings, + 
ShareSettingsProjectConfig, + ShieldedInstanceConfig, + ShieldedInstanceIdentity, + ShieldedInstanceIdentityEntry, + ShieldedInstanceIntegrityPolicy, + SignedUrlKey, + SimulateMaintenanceEventInstanceRequest, + SimulateMaintenanceEventNodeGroupRequest, + Snapshot, + SnapshotList, + SourceDiskEncryptionKey, + SourceInstanceParams, + SourceInstanceProperties, + SslCertificate, + SslCertificateAggregatedList, + SslCertificateList, + SslCertificateManagedSslCertificate, + SslCertificateSelfManagedSslCertificate, + SslCertificatesScopedList, + SSLHealthCheck, + SslPoliciesAggregatedList, + SslPoliciesList, + SslPoliciesListAvailableFeaturesResponse, + SslPoliciesScopedList, + SslPolicy, + SslPolicyReference, + StartAsyncReplicationDiskRequest, + StartAsyncReplicationRegionDiskRequest, + StartInstanceRequest, + StartWithEncryptionKeyInstanceRequest, + StatefulPolicy, + StatefulPolicyPreservedState, + StatefulPolicyPreservedStateDiskDevice, + StopAsyncReplicationDiskRequest, + StopAsyncReplicationRegionDiskRequest, + StopGroupAsyncReplicationDiskRequest, + StopGroupAsyncReplicationRegionDiskRequest, + StopInstanceRequest, + Subnetwork, + SubnetworkAggregatedList, + SubnetworkList, + SubnetworkLogConfig, + SubnetworkSecondaryRange, + SubnetworksExpandIpCidrRangeRequest, + SubnetworksScopedList, + SubnetworksSetPrivateIpGoogleAccessRequest, + Subsetting, + SuspendInstanceRequest, + SwitchToCustomModeNetworkRequest, + Tags, + TargetGrpcProxy, + TargetGrpcProxyList, + TargetHttpProxiesScopedList, + TargetHttpProxy, + TargetHttpProxyAggregatedList, + TargetHttpProxyList, + TargetHttpsProxiesScopedList, + TargetHttpsProxiesSetCertificateMapRequest, + TargetHttpsProxiesSetQuicOverrideRequest, + TargetHttpsProxiesSetSslCertificatesRequest, + TargetHttpsProxy, + TargetHttpsProxyAggregatedList, + TargetHttpsProxyList, + TargetInstance, + TargetInstanceAggregatedList, + TargetInstanceList, + TargetInstancesScopedList, + TargetPool, + TargetPoolAggregatedList, + 
TargetPoolInstanceHealth, + TargetPoolList, + TargetPoolsAddHealthCheckRequest, + TargetPoolsAddInstanceRequest, + TargetPoolsRemoveHealthCheckRequest, + TargetPoolsRemoveInstanceRequest, + TargetPoolsScopedList, + TargetReference, + TargetSslProxiesSetBackendServiceRequest, + TargetSslProxiesSetCertificateMapRequest, + TargetSslProxiesSetProxyHeaderRequest, + TargetSslProxiesSetSslCertificatesRequest, + TargetSslProxy, + TargetSslProxyList, + TargetTcpProxiesScopedList, + TargetTcpProxiesSetBackendServiceRequest, + TargetTcpProxiesSetProxyHeaderRequest, + TargetTcpProxy, + TargetTcpProxyAggregatedList, + TargetTcpProxyList, + TargetVpnGateway, + TargetVpnGatewayAggregatedList, + TargetVpnGatewayList, + TargetVpnGatewaysScopedList, + TCPHealthCheck, + TestFailure, + TestIamPermissionsDiskRequest, + TestIamPermissionsExternalVpnGatewayRequest, + TestIamPermissionsFirewallPolicyRequest, + TestIamPermissionsImageRequest, + TestIamPermissionsInstanceRequest, + TestIamPermissionsInstanceTemplateRequest, + TestIamPermissionsLicenseCodeRequest, + TestIamPermissionsLicenseRequest, + TestIamPermissionsMachineImageRequest, + TestIamPermissionsNetworkAttachmentRequest, + TestIamPermissionsNetworkEndpointGroupRequest, + TestIamPermissionsNetworkFirewallPolicyRequest, + TestIamPermissionsNodeGroupRequest, + TestIamPermissionsNodeTemplateRequest, + TestIamPermissionsPacketMirroringRequest, + TestIamPermissionsRegionDiskRequest, + TestIamPermissionsRegionNetworkFirewallPolicyRequest, + TestIamPermissionsReservationRequest, + TestIamPermissionsResourcePolicyRequest, + TestIamPermissionsServiceAttachmentRequest, + TestIamPermissionsSnapshotRequest, + TestIamPermissionsSubnetworkRequest, + TestIamPermissionsVpnGatewayRequest, + TestPermissionsRequest, + TestPermissionsResponse, + Uint128, + UpdateAccessConfigInstanceRequest, + UpdateAutoscalerRequest, + UpdateBackendBucketRequest, + UpdateBackendServiceRequest, + UpdateDiskRequest, + UpdateDisplayDeviceInstanceRequest, + 
UpdateFirewallRequest, + UpdateHealthCheckRequest, + UpdateInstanceRequest, + UpdateNetworkInterfaceInstanceRequest, + UpdatePeeringNetworkRequest, + UpdatePerInstanceConfigsInstanceGroupManagerRequest, + UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, + UpdateRegionAutoscalerRequest, + UpdateRegionBackendServiceRequest, + UpdateRegionCommitmentRequest, + UpdateRegionDiskRequest, + UpdateRegionHealthCheckRequest, + UpdateRegionUrlMapRequest, + UpdateReservationRequest, + UpdateRouterRequest, + UpdateShieldedInstanceConfigInstanceRequest, + UpdateUrlMapRequest, + UrlMap, + UrlMapList, + UrlMapReference, + UrlMapsAggregatedList, + UrlMapsScopedList, + UrlMapsValidateRequest, + UrlMapsValidateResponse, + UrlMapTest, + UrlMapTestHeader, + UrlMapValidationResult, + UrlRewrite, + UsableSubnetwork, + UsableSubnetworksAggregatedList, + UsableSubnetworkSecondaryRange, + UsageExportLocation, + ValidateRegionUrlMapRequest, + ValidateUrlMapRequest, + VmEndpointNatMappings, + VmEndpointNatMappingsInterfaceNatMappings, + VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings, + VmEndpointNatMappingsList, + VpnGateway, + VpnGatewayAggregatedList, + VpnGatewayList, + VpnGatewaysGetStatusResponse, + VpnGatewaysScopedList, + VpnGatewayStatus, + VpnGatewayStatusHighAvailabilityRequirementState, + VpnGatewayStatusTunnel, + VpnGatewayStatusVpnConnection, + VpnGatewayVpnGatewayInterface, + VpnTunnel, + VpnTunnelAggregatedList, + VpnTunnelList, + VpnTunnelsScopedList, + WafExpressionSet, + WafExpressionSetExpression, + WaitGlobalOperationRequest, + WaitRegionOperationRequest, + WaitZoneOperationRequest, + Warning, + Warnings, + WeightedBackendService, + XpnHostList, + XpnResourceId, + Zone, + ZoneList, + ZoneSetLabelsRequest, + ZoneSetPolicyRequest, +) + +__all__ = ( + 'AbandonInstancesInstanceGroupManagerRequest', + 'AbandonInstancesRegionInstanceGroupManagerRequest', + 'AcceleratorConfig', + 'Accelerators', + 'AcceleratorType', + 'AcceleratorTypeAggregatedList', + 
'AcceleratorTypeList', + 'AcceleratorTypesScopedList', + 'AccessConfig', + 'AddAccessConfigInstanceRequest', + 'AddAssociationFirewallPolicyRequest', + 'AddAssociationNetworkFirewallPolicyRequest', + 'AddAssociationRegionNetworkFirewallPolicyRequest', + 'AddHealthCheckTargetPoolRequest', + 'AddInstancesInstanceGroupRequest', + 'AddInstanceTargetPoolRequest', + 'AddNodesNodeGroupRequest', + 'AddPeeringNetworkRequest', + 'AddResourcePoliciesDiskRequest', + 'AddResourcePoliciesInstanceRequest', + 'AddResourcePoliciesRegionDiskRequest', + 'Address', + 'AddressAggregatedList', + 'AddressesScopedList', + 'AddressList', + 'AddRuleFirewallPolicyRequest', + 'AddRuleNetworkFirewallPolicyRequest', + 'AddRuleRegionNetworkFirewallPolicyRequest', + 'AddRuleSecurityPolicyRequest', + 'AddSignedUrlKeyBackendBucketRequest', + 'AddSignedUrlKeyBackendServiceRequest', + 'AdvancedMachineFeatures', + 'AggregatedListAcceleratorTypesRequest', + 'AggregatedListAddressesRequest', + 'AggregatedListAutoscalersRequest', + 'AggregatedListBackendServicesRequest', + 'AggregatedListDisksRequest', + 'AggregatedListDiskTypesRequest', + 'AggregatedListForwardingRulesRequest', + 'AggregatedListGlobalOperationsRequest', + 'AggregatedListHealthChecksRequest', + 'AggregatedListInstanceGroupManagersRequest', + 'AggregatedListInstanceGroupsRequest', + 'AggregatedListInstancesRequest', + 'AggregatedListInstanceTemplatesRequest', + 'AggregatedListInterconnectAttachmentsRequest', + 'AggregatedListMachineTypesRequest', + 'AggregatedListNetworkAttachmentsRequest', + 'AggregatedListNetworkEdgeSecurityServicesRequest', + 'AggregatedListNetworkEndpointGroupsRequest', + 'AggregatedListNodeGroupsRequest', + 'AggregatedListNodeTemplatesRequest', + 'AggregatedListNodeTypesRequest', + 'AggregatedListPacketMirroringsRequest', + 'AggregatedListPublicDelegatedPrefixesRequest', + 'AggregatedListRegionCommitmentsRequest', + 'AggregatedListReservationsRequest', + 'AggregatedListResourcePoliciesRequest', + 
'AggregatedListRoutersRequest', + 'AggregatedListSecurityPoliciesRequest', + 'AggregatedListServiceAttachmentsRequest', + 'AggregatedListSslCertificatesRequest', + 'AggregatedListSslPoliciesRequest', + 'AggregatedListSubnetworksRequest', + 'AggregatedListTargetHttpProxiesRequest', + 'AggregatedListTargetHttpsProxiesRequest', + 'AggregatedListTargetInstancesRequest', + 'AggregatedListTargetPoolsRequest', + 'AggregatedListTargetTcpProxiesRequest', + 'AggregatedListTargetVpnGatewaysRequest', + 'AggregatedListUrlMapsRequest', + 'AggregatedListVpnGatewaysRequest', + 'AggregatedListVpnTunnelsRequest', + 'AliasIpRange', + 'AllocationResourceStatus', + 'AllocationResourceStatusSpecificSKUAllocation', + 'AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk', + 'AllocationSpecificSKUAllocationReservedInstanceProperties', + 'AllocationSpecificSKUReservation', + 'Allowed', + 'ApplyUpdatesToInstancesInstanceGroupManagerRequest', + 'ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest', + 'AttachDiskInstanceRequest', + 'AttachedDisk', + 'AttachedDiskInitializeParams', + 'AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest', + 'AttachNetworkEndpointsNetworkEndpointGroupRequest', + 'AuditConfig', + 'AuditLogConfig', + 'AuthorizationLoggingOptions', + 'Autoscaler', + 'AutoscalerAggregatedList', + 'AutoscalerList', + 'AutoscalersScopedList', + 'AutoscalerStatusDetails', + 'AutoscalingPolicy', + 'AutoscalingPolicyCpuUtilization', + 'AutoscalingPolicyCustomMetricUtilization', + 'AutoscalingPolicyLoadBalancingUtilization', + 'AutoscalingPolicyScaleInControl', + 'AutoscalingPolicyScalingSchedule', + 'Backend', + 'BackendBucket', + 'BackendBucketCdnPolicy', + 'BackendBucketCdnPolicyBypassCacheOnRequestHeader', + 'BackendBucketCdnPolicyCacheKeyPolicy', + 'BackendBucketCdnPolicyNegativeCachingPolicy', + 'BackendBucketList', + 'BackendService', + 'BackendServiceAggregatedList', + 'BackendServiceCdnPolicy', + 'BackendServiceCdnPolicyBypassCacheOnRequestHeader', + 
'BackendServiceCdnPolicyNegativeCachingPolicy', + 'BackendServiceConnectionTrackingPolicy', + 'BackendServiceFailoverPolicy', + 'BackendServiceGroupHealth', + 'BackendServiceIAP', + 'BackendServiceList', + 'BackendServiceLocalityLoadBalancingPolicyConfig', + 'BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy', + 'BackendServiceLocalityLoadBalancingPolicyConfigPolicy', + 'BackendServiceLogConfig', + 'BackendServiceReference', + 'BackendServicesScopedList', + 'BfdPacket', + 'BfdStatus', + 'BfdStatusPacketCounts', + 'Binding', + 'BulkInsertDiskRequest', + 'BulkInsertDiskResource', + 'BulkInsertInstanceRequest', + 'BulkInsertInstanceResource', + 'BulkInsertInstanceResourcePerInstanceProperties', + 'BulkInsertRegionDiskRequest', + 'BulkInsertRegionInstanceRequest', + 'CacheInvalidationRule', + 'CacheKeyPolicy', + 'CircuitBreakers', + 'CloneRulesFirewallPolicyRequest', + 'CloneRulesNetworkFirewallPolicyRequest', + 'CloneRulesRegionNetworkFirewallPolicyRequest', + 'Commitment', + 'CommitmentAggregatedList', + 'CommitmentList', + 'CommitmentsScopedList', + 'Condition', + 'ConfidentialInstanceConfig', + 'ConnectionDraining', + 'ConsistentHashLoadBalancerSettings', + 'ConsistentHashLoadBalancerSettingsHttpCookie', + 'CorsPolicy', + 'CreateInstancesInstanceGroupManagerRequest', + 'CreateInstancesRegionInstanceGroupManagerRequest', + 'CreateSnapshotDiskRequest', + 'CreateSnapshotRegionDiskRequest', + 'CustomerEncryptionKey', + 'CustomerEncryptionKeyProtectedDisk', + 'Data', + 'DeleteAccessConfigInstanceRequest', + 'DeleteAddressRequest', + 'DeleteAutoscalerRequest', + 'DeleteBackendBucketRequest', + 'DeleteBackendServiceRequest', + 'DeleteDiskRequest', + 'DeleteExternalVpnGatewayRequest', + 'DeleteFirewallPolicyRequest', + 'DeleteFirewallRequest', + 'DeleteForwardingRuleRequest', + 'DeleteGlobalAddressRequest', + 'DeleteGlobalForwardingRuleRequest', + 'DeleteGlobalNetworkEndpointGroupRequest', + 'DeleteGlobalOperationRequest', + 'DeleteGlobalOperationResponse', + 
'DeleteGlobalOrganizationOperationRequest', + 'DeleteGlobalOrganizationOperationResponse', + 'DeleteGlobalPublicDelegatedPrefixeRequest', + 'DeleteHealthCheckRequest', + 'DeleteImageRequest', + 'DeleteInstanceGroupManagerRequest', + 'DeleteInstanceGroupRequest', + 'DeleteInstanceRequest', + 'DeleteInstancesInstanceGroupManagerRequest', + 'DeleteInstancesRegionInstanceGroupManagerRequest', + 'DeleteInstanceTemplateRequest', + 'DeleteInterconnectAttachmentRequest', + 'DeleteInterconnectRequest', + 'DeleteLicenseRequest', + 'DeleteMachineImageRequest', + 'DeleteNetworkAttachmentRequest', + 'DeleteNetworkEdgeSecurityServiceRequest', + 'DeleteNetworkEndpointGroupRequest', + 'DeleteNetworkFirewallPolicyRequest', + 'DeleteNetworkRequest', + 'DeleteNodeGroupRequest', + 'DeleteNodesNodeGroupRequest', + 'DeleteNodeTemplateRequest', + 'DeletePacketMirroringRequest', + 'DeletePerInstanceConfigsInstanceGroupManagerRequest', + 'DeletePerInstanceConfigsRegionInstanceGroupManagerRequest', + 'DeletePublicAdvertisedPrefixeRequest', + 'DeletePublicDelegatedPrefixeRequest', + 'DeleteRegionAutoscalerRequest', + 'DeleteRegionBackendServiceRequest', + 'DeleteRegionDiskRequest', + 'DeleteRegionHealthCheckRequest', + 'DeleteRegionHealthCheckServiceRequest', + 'DeleteRegionInstanceGroupManagerRequest', + 'DeleteRegionInstanceTemplateRequest', + 'DeleteRegionNetworkEndpointGroupRequest', + 'DeleteRegionNetworkFirewallPolicyRequest', + 'DeleteRegionNotificationEndpointRequest', + 'DeleteRegionOperationRequest', + 'DeleteRegionOperationResponse', + 'DeleteRegionSecurityPolicyRequest', + 'DeleteRegionSslCertificateRequest', + 'DeleteRegionSslPolicyRequest', + 'DeleteRegionTargetHttpProxyRequest', + 'DeleteRegionTargetHttpsProxyRequest', + 'DeleteRegionTargetTcpProxyRequest', + 'DeleteRegionUrlMapRequest', + 'DeleteReservationRequest', + 'DeleteResourcePolicyRequest', + 'DeleteRouteRequest', + 'DeleteRouterRequest', + 'DeleteSecurityPolicyRequest', + 'DeleteServiceAttachmentRequest', + 
'DeleteSignedUrlKeyBackendBucketRequest', + 'DeleteSignedUrlKeyBackendServiceRequest', + 'DeleteSnapshotRequest', + 'DeleteSslCertificateRequest', + 'DeleteSslPolicyRequest', + 'DeleteSubnetworkRequest', + 'DeleteTargetGrpcProxyRequest', + 'DeleteTargetHttpProxyRequest', + 'DeleteTargetHttpsProxyRequest', + 'DeleteTargetInstanceRequest', + 'DeleteTargetPoolRequest', + 'DeleteTargetSslProxyRequest', + 'DeleteTargetTcpProxyRequest', + 'DeleteTargetVpnGatewayRequest', + 'DeleteUrlMapRequest', + 'DeleteVpnGatewayRequest', + 'DeleteVpnTunnelRequest', + 'DeleteZoneOperationRequest', + 'DeleteZoneOperationResponse', + 'Denied', + 'DeprecateImageRequest', + 'DeprecationStatus', + 'DetachDiskInstanceRequest', + 'DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest', + 'DetachNetworkEndpointsNetworkEndpointGroupRequest', + 'DisableXpnHostProjectRequest', + 'DisableXpnResourceProjectRequest', + 'Disk', + 'DiskAggregatedList', + 'DiskAsyncReplication', + 'DiskAsyncReplicationList', + 'DiskInstantiationConfig', + 'DiskList', + 'DiskMoveRequest', + 'DiskParams', + 'DiskResourceStatus', + 'DiskResourceStatusAsyncReplicationStatus', + 'DisksAddResourcePoliciesRequest', + 'DisksRemoveResourcePoliciesRequest', + 'DisksResizeRequest', + 'DisksScopedList', + 'DisksStartAsyncReplicationRequest', + 'DisksStopGroupAsyncReplicationResource', + 'DiskType', + 'DiskTypeAggregatedList', + 'DiskTypeList', + 'DiskTypesScopedList', + 'DisplayDevice', + 'DistributionPolicy', + 'DistributionPolicyZoneConfiguration', + 'Duration', + 'EnableXpnHostProjectRequest', + 'EnableXpnResourceProjectRequest', + 'Error', + 'ErrorDetails', + 'ErrorInfo', + 'Errors', + 'ExchangedPeeringRoute', + 'ExchangedPeeringRoutesList', + 'ExpandIpCidrRangeSubnetworkRequest', + 'Expr', + 'ExternalVpnGateway', + 'ExternalVpnGatewayInterface', + 'ExternalVpnGatewayList', + 'FileContentBuffer', + 'Firewall', + 'FirewallList', + 'FirewallLogConfig', + 'FirewallPoliciesListAssociationsResponse', + 'FirewallPolicy', + 
'FirewallPolicyAssociation', + 'FirewallPolicyList', + 'FirewallPolicyRule', + 'FirewallPolicyRuleMatcher', + 'FirewallPolicyRuleMatcherLayer4Config', + 'FirewallPolicyRuleSecureTag', + 'FixedOrPercent', + 'ForwardingRule', + 'ForwardingRuleAggregatedList', + 'ForwardingRuleList', + 'ForwardingRuleReference', + 'ForwardingRuleServiceDirectoryRegistration', + 'ForwardingRulesScopedList', + 'GetAcceleratorTypeRequest', + 'GetAddressRequest', + 'GetAssociationFirewallPolicyRequest', + 'GetAssociationNetworkFirewallPolicyRequest', + 'GetAssociationRegionNetworkFirewallPolicyRequest', + 'GetAutoscalerRequest', + 'GetBackendBucketRequest', + 'GetBackendServiceRequest', + 'GetDiagnosticsInterconnectRequest', + 'GetDiskRequest', + 'GetDiskTypeRequest', + 'GetEffectiveFirewallsInstanceRequest', + 'GetEffectiveFirewallsNetworkRequest', + 'GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest', + 'GetExternalVpnGatewayRequest', + 'GetFirewallPolicyRequest', + 'GetFirewallRequest', + 'GetForwardingRuleRequest', + 'GetFromFamilyImageRequest', + 'GetGlobalAddressRequest', + 'GetGlobalForwardingRuleRequest', + 'GetGlobalNetworkEndpointGroupRequest', + 'GetGlobalOperationRequest', + 'GetGlobalOrganizationOperationRequest', + 'GetGlobalPublicDelegatedPrefixeRequest', + 'GetGuestAttributesInstanceRequest', + 'GetHealthBackendServiceRequest', + 'GetHealthCheckRequest', + 'GetHealthRegionBackendServiceRequest', + 'GetHealthTargetPoolRequest', + 'GetIamPolicyBackendServiceRequest', + 'GetIamPolicyDiskRequest', + 'GetIamPolicyFirewallPolicyRequest', + 'GetIamPolicyImageRequest', + 'GetIamPolicyInstanceRequest', + 'GetIamPolicyInstanceTemplateRequest', + 'GetIamPolicyLicenseRequest', + 'GetIamPolicyMachineImageRequest', + 'GetIamPolicyNetworkAttachmentRequest', + 'GetIamPolicyNetworkFirewallPolicyRequest', + 'GetIamPolicyNodeGroupRequest', + 'GetIamPolicyNodeTemplateRequest', + 'GetIamPolicyRegionBackendServiceRequest', + 'GetIamPolicyRegionDiskRequest', + 
'GetIamPolicyRegionNetworkFirewallPolicyRequest', + 'GetIamPolicyReservationRequest', + 'GetIamPolicyResourcePolicyRequest', + 'GetIamPolicyServiceAttachmentRequest', + 'GetIamPolicySnapshotRequest', + 'GetIamPolicySubnetworkRequest', + 'GetImageFamilyViewRequest', + 'GetImageRequest', + 'GetInstanceGroupManagerRequest', + 'GetInstanceGroupRequest', + 'GetInstanceRequest', + 'GetInstanceTemplateRequest', + 'GetInterconnectAttachmentRequest', + 'GetInterconnectLocationRequest', + 'GetInterconnectRemoteLocationRequest', + 'GetInterconnectRequest', + 'GetLicenseCodeRequest', + 'GetLicenseRequest', + 'GetMachineImageRequest', + 'GetMachineTypeRequest', + 'GetNatMappingInfoRoutersRequest', + 'GetNetworkAttachmentRequest', + 'GetNetworkEdgeSecurityServiceRequest', + 'GetNetworkEndpointGroupRequest', + 'GetNetworkFirewallPolicyRequest', + 'GetNetworkRequest', + 'GetNodeGroupRequest', + 'GetNodeTemplateRequest', + 'GetNodeTypeRequest', + 'GetPacketMirroringRequest', + 'GetProjectRequest', + 'GetPublicAdvertisedPrefixeRequest', + 'GetPublicDelegatedPrefixeRequest', + 'GetRegionAutoscalerRequest', + 'GetRegionBackendServiceRequest', + 'GetRegionCommitmentRequest', + 'GetRegionDiskRequest', + 'GetRegionDiskTypeRequest', + 'GetRegionHealthCheckRequest', + 'GetRegionHealthCheckServiceRequest', + 'GetRegionInstanceGroupManagerRequest', + 'GetRegionInstanceGroupRequest', + 'GetRegionInstanceTemplateRequest', + 'GetRegionNetworkEndpointGroupRequest', + 'GetRegionNetworkFirewallPolicyRequest', + 'GetRegionNotificationEndpointRequest', + 'GetRegionOperationRequest', + 'GetRegionRequest', + 'GetRegionSecurityPolicyRequest', + 'GetRegionSslCertificateRequest', + 'GetRegionSslPolicyRequest', + 'GetRegionTargetHttpProxyRequest', + 'GetRegionTargetHttpsProxyRequest', + 'GetRegionTargetTcpProxyRequest', + 'GetRegionUrlMapRequest', + 'GetReservationRequest', + 'GetResourcePolicyRequest', + 'GetRouteRequest', + 'GetRouterRequest', + 'GetRouterStatusRouterRequest', + 
'GetRuleFirewallPolicyRequest', + 'GetRuleNetworkFirewallPolicyRequest', + 'GetRuleRegionNetworkFirewallPolicyRequest', + 'GetRuleSecurityPolicyRequest', + 'GetScreenshotInstanceRequest', + 'GetSecurityPolicyRequest', + 'GetSerialPortOutputInstanceRequest', + 'GetServiceAttachmentRequest', + 'GetShieldedInstanceIdentityInstanceRequest', + 'GetSnapshotRequest', + 'GetSslCertificateRequest', + 'GetSslPolicyRequest', + 'GetStatusVpnGatewayRequest', + 'GetSubnetworkRequest', + 'GetTargetGrpcProxyRequest', + 'GetTargetHttpProxyRequest', + 'GetTargetHttpsProxyRequest', + 'GetTargetInstanceRequest', + 'GetTargetPoolRequest', + 'GetTargetSslProxyRequest', + 'GetTargetTcpProxyRequest', + 'GetTargetVpnGatewayRequest', + 'GetUrlMapRequest', + 'GetVpnGatewayRequest', + 'GetVpnTunnelRequest', + 'GetXpnHostProjectRequest', + 'GetXpnResourcesProjectsRequest', + 'GetZoneOperationRequest', + 'GetZoneRequest', + 'GlobalAddressesMoveRequest', + 'GlobalNetworkEndpointGroupsAttachEndpointsRequest', + 'GlobalNetworkEndpointGroupsDetachEndpointsRequest', + 'GlobalOrganizationSetPolicyRequest', + 'GlobalSetLabelsRequest', + 'GlobalSetPolicyRequest', + 'GRPCHealthCheck', + 'GuestAttributes', + 'GuestAttributesEntry', + 'GuestAttributesValue', + 'GuestOsFeature', + 'HealthCheck', + 'HealthCheckList', + 'HealthCheckLogConfig', + 'HealthCheckReference', + 'HealthChecksAggregatedList', + 'HealthCheckService', + 'HealthCheckServiceReference', + 'HealthCheckServicesList', + 'HealthChecksScopedList', + 'HealthStatus', + 'HealthStatusForNetworkEndpoint', + 'Help', + 'HelpLink', + 'HostRule', + 'HTTP2HealthCheck', + 'HttpFaultAbort', + 'HttpFaultDelay', + 'HttpFaultInjection', + 'HttpHeaderAction', + 'HttpHeaderMatch', + 'HttpHeaderOption', + 'HTTPHealthCheck', + 'HttpQueryParameterMatch', + 'HttpRedirectAction', + 'HttpRetryPolicy', + 'HttpRouteAction', + 'HttpRouteRule', + 'HttpRouteRuleMatch', + 'HTTPSHealthCheck', + 'Image', + 'ImageFamilyView', + 'ImageList', + 'InitialStateConfig', + 
'InsertAddressRequest', + 'InsertAutoscalerRequest', + 'InsertBackendBucketRequest', + 'InsertBackendServiceRequest', + 'InsertDiskRequest', + 'InsertExternalVpnGatewayRequest', + 'InsertFirewallPolicyRequest', + 'InsertFirewallRequest', + 'InsertForwardingRuleRequest', + 'InsertGlobalAddressRequest', + 'InsertGlobalForwardingRuleRequest', + 'InsertGlobalNetworkEndpointGroupRequest', + 'InsertGlobalPublicDelegatedPrefixeRequest', + 'InsertHealthCheckRequest', + 'InsertImageRequest', + 'InsertInstanceGroupManagerRequest', + 'InsertInstanceGroupRequest', + 'InsertInstanceRequest', + 'InsertInstanceTemplateRequest', + 'InsertInterconnectAttachmentRequest', + 'InsertInterconnectRequest', + 'InsertLicenseRequest', + 'InsertMachineImageRequest', + 'InsertNetworkAttachmentRequest', + 'InsertNetworkEdgeSecurityServiceRequest', + 'InsertNetworkEndpointGroupRequest', + 'InsertNetworkFirewallPolicyRequest', + 'InsertNetworkRequest', + 'InsertNodeGroupRequest', + 'InsertNodeTemplateRequest', + 'InsertPacketMirroringRequest', + 'InsertPublicAdvertisedPrefixeRequest', + 'InsertPublicDelegatedPrefixeRequest', + 'InsertRegionAutoscalerRequest', + 'InsertRegionBackendServiceRequest', + 'InsertRegionCommitmentRequest', + 'InsertRegionDiskRequest', + 'InsertRegionHealthCheckRequest', + 'InsertRegionHealthCheckServiceRequest', + 'InsertRegionInstanceGroupManagerRequest', + 'InsertRegionInstanceTemplateRequest', + 'InsertRegionNetworkEndpointGroupRequest', + 'InsertRegionNetworkFirewallPolicyRequest', + 'InsertRegionNotificationEndpointRequest', + 'InsertRegionSecurityPolicyRequest', + 'InsertRegionSslCertificateRequest', + 'InsertRegionSslPolicyRequest', + 'InsertRegionTargetHttpProxyRequest', + 'InsertRegionTargetHttpsProxyRequest', + 'InsertRegionTargetTcpProxyRequest', + 'InsertRegionUrlMapRequest', + 'InsertReservationRequest', + 'InsertResourcePolicyRequest', + 'InsertRouteRequest', + 'InsertRouterRequest', + 'InsertSecurityPolicyRequest', + 'InsertServiceAttachmentRequest', + 
'InsertSnapshotRequest', + 'InsertSslCertificateRequest', + 'InsertSslPolicyRequest', + 'InsertSubnetworkRequest', + 'InsertTargetGrpcProxyRequest', + 'InsertTargetHttpProxyRequest', + 'InsertTargetHttpsProxyRequest', + 'InsertTargetInstanceRequest', + 'InsertTargetPoolRequest', + 'InsertTargetSslProxyRequest', + 'InsertTargetTcpProxyRequest', + 'InsertTargetVpnGatewayRequest', + 'InsertUrlMapRequest', + 'InsertVpnGatewayRequest', + 'InsertVpnTunnelRequest', + 'Instance', + 'InstanceAggregatedList', + 'InstanceConsumptionData', + 'InstanceConsumptionInfo', + 'InstanceGroup', + 'InstanceGroupAggregatedList', + 'InstanceGroupList', + 'InstanceGroupManager', + 'InstanceGroupManagerActionsSummary', + 'InstanceGroupManagerAggregatedList', + 'InstanceGroupManagerAutoHealingPolicy', + 'InstanceGroupManagerInstanceLifecyclePolicy', + 'InstanceGroupManagerList', + 'InstanceGroupManagersAbandonInstancesRequest', + 'InstanceGroupManagersApplyUpdatesRequest', + 'InstanceGroupManagersCreateInstancesRequest', + 'InstanceGroupManagersDeleteInstancesRequest', + 'InstanceGroupManagersDeletePerInstanceConfigsReq', + 'InstanceGroupManagersListErrorsResponse', + 'InstanceGroupManagersListManagedInstancesResponse', + 'InstanceGroupManagersListPerInstanceConfigsResp', + 'InstanceGroupManagersPatchPerInstanceConfigsReq', + 'InstanceGroupManagersRecreateInstancesRequest', + 'InstanceGroupManagersScopedList', + 'InstanceGroupManagersSetInstanceTemplateRequest', + 'InstanceGroupManagersSetTargetPoolsRequest', + 'InstanceGroupManagerStatus', + 'InstanceGroupManagerStatusStateful', + 'InstanceGroupManagerStatusStatefulPerInstanceConfigs', + 'InstanceGroupManagerStatusVersionTarget', + 'InstanceGroupManagersUpdatePerInstanceConfigsReq', + 'InstanceGroupManagerUpdatePolicy', + 'InstanceGroupManagerVersion', + 'InstanceGroupsAddInstancesRequest', + 'InstanceGroupsListInstances', + 'InstanceGroupsListInstancesRequest', + 'InstanceGroupsRemoveInstancesRequest', + 'InstanceGroupsScopedList', + 
'InstanceGroupsSetNamedPortsRequest', + 'InstanceList', + 'InstanceListReferrers', + 'InstanceManagedByIgmError', + 'InstanceManagedByIgmErrorInstanceActionDetails', + 'InstanceManagedByIgmErrorManagedInstanceError', + 'InstanceMoveRequest', + 'InstanceParams', + 'InstanceProperties', + 'InstanceReference', + 'InstancesAddResourcePoliciesRequest', + 'InstancesGetEffectiveFirewallsResponse', + 'InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'InstancesRemoveResourcePoliciesRequest', + 'InstancesScopedList', + 'InstancesSetLabelsRequest', + 'InstancesSetMachineResourcesRequest', + 'InstancesSetMachineTypeRequest', + 'InstancesSetMinCpuPlatformRequest', + 'InstancesSetNameRequest', + 'InstancesSetServiceAccountRequest', + 'InstancesStartWithEncryptionKeyRequest', + 'InstanceTemplate', + 'InstanceTemplateAggregatedList', + 'InstanceTemplateList', + 'InstanceTemplatesScopedList', + 'InstanceWithNamedPorts', + 'Int64RangeMatch', + 'Interconnect', + 'InterconnectAttachment', + 'InterconnectAttachmentAggregatedList', + 'InterconnectAttachmentConfigurationConstraints', + 'InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange', + 'InterconnectAttachmentList', + 'InterconnectAttachmentPartnerMetadata', + 'InterconnectAttachmentPrivateInfo', + 'InterconnectAttachmentsScopedList', + 'InterconnectCircuitInfo', + 'InterconnectDiagnostics', + 'InterconnectDiagnosticsARPEntry', + 'InterconnectDiagnosticsLinkLACPStatus', + 'InterconnectDiagnosticsLinkOpticalPower', + 'InterconnectDiagnosticsLinkStatus', + 'InterconnectList', + 'InterconnectLocation', + 'InterconnectLocationList', + 'InterconnectLocationRegionInfo', + 'InterconnectOutageNotification', + 'InterconnectRemoteLocation', + 'InterconnectRemoteLocationConstraints', + 'InterconnectRemoteLocationConstraintsSubnetLengthRange', + 'InterconnectRemoteLocationList', + 'InterconnectRemoteLocationPermittedConnections', + 'InterconnectsGetDiagnosticsResponse', + 'InvalidateCacheUrlMapRequest', + 'Items', + 
'License', + 'LicenseCode', + 'LicenseCodeLicenseAlias', + 'LicenseResourceCommitment', + 'LicenseResourceRequirements', + 'LicensesListResponse', + 'ListAcceleratorTypesRequest', + 'ListAddressesRequest', + 'ListAssociationsFirewallPolicyRequest', + 'ListAutoscalersRequest', + 'ListAvailableFeaturesRegionSslPoliciesRequest', + 'ListAvailableFeaturesSslPoliciesRequest', + 'ListBackendBucketsRequest', + 'ListBackendServicesRequest', + 'ListDisksRequest', + 'ListDiskTypesRequest', + 'ListErrorsInstanceGroupManagersRequest', + 'ListErrorsRegionInstanceGroupManagersRequest', + 'ListExternalVpnGatewaysRequest', + 'ListFirewallPoliciesRequest', + 'ListFirewallsRequest', + 'ListForwardingRulesRequest', + 'ListGlobalAddressesRequest', + 'ListGlobalForwardingRulesRequest', + 'ListGlobalNetworkEndpointGroupsRequest', + 'ListGlobalOperationsRequest', + 'ListGlobalOrganizationOperationsRequest', + 'ListGlobalPublicDelegatedPrefixesRequest', + 'ListHealthChecksRequest', + 'ListImagesRequest', + 'ListInstanceGroupManagersRequest', + 'ListInstanceGroupsRequest', + 'ListInstancesInstanceGroupsRequest', + 'ListInstancesRegionInstanceGroupsRequest', + 'ListInstancesRequest', + 'ListInstanceTemplatesRequest', + 'ListInterconnectAttachmentsRequest', + 'ListInterconnectLocationsRequest', + 'ListInterconnectRemoteLocationsRequest', + 'ListInterconnectsRequest', + 'ListLicensesRequest', + 'ListMachineImagesRequest', + 'ListMachineTypesRequest', + 'ListManagedInstancesInstanceGroupManagersRequest', + 'ListManagedInstancesRegionInstanceGroupManagersRequest', + 'ListNetworkAttachmentsRequest', + 'ListNetworkEndpointGroupsRequest', + 'ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest', + 'ListNetworkEndpointsNetworkEndpointGroupsRequest', + 'ListNetworkFirewallPoliciesRequest', + 'ListNetworksRequest', + 'ListNodeGroupsRequest', + 'ListNodesNodeGroupsRequest', + 'ListNodeTemplatesRequest', + 'ListNodeTypesRequest', + 'ListPacketMirroringsRequest', + 'ListPeeringRoutesNetworksRequest', + 
'ListPerInstanceConfigsInstanceGroupManagersRequest', + 'ListPerInstanceConfigsRegionInstanceGroupManagersRequest', + 'ListPreconfiguredExpressionSetsSecurityPoliciesRequest', + 'ListPublicAdvertisedPrefixesRequest', + 'ListPublicDelegatedPrefixesRequest', + 'ListReferrersInstancesRequest', + 'ListRegionAutoscalersRequest', + 'ListRegionBackendServicesRequest', + 'ListRegionCommitmentsRequest', + 'ListRegionDisksRequest', + 'ListRegionDiskTypesRequest', + 'ListRegionHealthCheckServicesRequest', + 'ListRegionHealthChecksRequest', + 'ListRegionInstanceGroupManagersRequest', + 'ListRegionInstanceGroupsRequest', + 'ListRegionInstanceTemplatesRequest', + 'ListRegionNetworkEndpointGroupsRequest', + 'ListRegionNetworkFirewallPoliciesRequest', + 'ListRegionNotificationEndpointsRequest', + 'ListRegionOperationsRequest', + 'ListRegionSecurityPoliciesRequest', + 'ListRegionsRequest', + 'ListRegionSslCertificatesRequest', + 'ListRegionSslPoliciesRequest', + 'ListRegionTargetHttpProxiesRequest', + 'ListRegionTargetHttpsProxiesRequest', + 'ListRegionTargetTcpProxiesRequest', + 'ListRegionUrlMapsRequest', + 'ListReservationsRequest', + 'ListResourcePoliciesRequest', + 'ListRoutersRequest', + 'ListRoutesRequest', + 'ListSecurityPoliciesRequest', + 'ListServiceAttachmentsRequest', + 'ListSnapshotsRequest', + 'ListSslCertificatesRequest', + 'ListSslPoliciesRequest', + 'ListSubnetworksRequest', + 'ListTargetGrpcProxiesRequest', + 'ListTargetHttpProxiesRequest', + 'ListTargetHttpsProxiesRequest', + 'ListTargetInstancesRequest', + 'ListTargetPoolsRequest', + 'ListTargetSslProxiesRequest', + 'ListTargetTcpProxiesRequest', + 'ListTargetVpnGatewaysRequest', + 'ListUrlMapsRequest', + 'ListUsableSubnetworksRequest', + 'ListVpnGatewaysRequest', + 'ListVpnTunnelsRequest', + 'ListXpnHostsProjectsRequest', + 'ListZoneOperationsRequest', + 'ListZonesRequest', + 'LocalDisk', + 'LocalizedMessage', + 'LocationPolicy', + 'LocationPolicyLocation', + 'LocationPolicyLocationConstraints', + 'LogConfig', 
+ 'LogConfigCloudAuditOptions', + 'LogConfigCounterOptions', + 'LogConfigCounterOptionsCustomField', + 'LogConfigDataAccessOptions', + 'MachineImage', + 'MachineImageList', + 'MachineType', + 'MachineTypeAggregatedList', + 'MachineTypeList', + 'MachineTypesScopedList', + 'ManagedInstance', + 'ManagedInstanceInstanceHealth', + 'ManagedInstanceLastAttempt', + 'ManagedInstanceVersion', + 'Metadata', + 'MetadataFilter', + 'MetadataFilterLabelMatch', + 'MoveAddressRequest', + 'MoveDiskProjectRequest', + 'MoveFirewallPolicyRequest', + 'MoveGlobalAddressRequest', + 'MoveInstanceProjectRequest', + 'NamedPort', + 'Network', + 'NetworkAttachment', + 'NetworkAttachmentAggregatedList', + 'NetworkAttachmentConnectedEndpoint', + 'NetworkAttachmentList', + 'NetworkAttachmentsScopedList', + 'NetworkEdgeSecurityService', + 'NetworkEdgeSecurityServiceAggregatedList', + 'NetworkEdgeSecurityServicesScopedList', + 'NetworkEndpoint', + 'NetworkEndpointGroup', + 'NetworkEndpointGroupAggregatedList', + 'NetworkEndpointGroupAppEngine', + 'NetworkEndpointGroupCloudFunction', + 'NetworkEndpointGroupCloudRun', + 'NetworkEndpointGroupList', + 'NetworkEndpointGroupPscData', + 'NetworkEndpointGroupsAttachEndpointsRequest', + 'NetworkEndpointGroupsDetachEndpointsRequest', + 'NetworkEndpointGroupsListEndpointsRequest', + 'NetworkEndpointGroupsListNetworkEndpoints', + 'NetworkEndpointGroupsScopedList', + 'NetworkEndpointWithHealthStatus', + 'NetworkInterface', + 'NetworkList', + 'NetworkPeering', + 'NetworkPerformanceConfig', + 'NetworkRoutingConfig', + 'NetworksAddPeeringRequest', + 'NetworksGetEffectiveFirewallsResponse', + 'NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'NetworksRemovePeeringRequest', + 'NetworksUpdatePeeringRequest', + 'NodeGroup', + 'NodeGroupAggregatedList', + 'NodeGroupAutoscalingPolicy', + 'NodeGroupList', + 'NodeGroupMaintenanceWindow', + 'NodeGroupNode', + 'NodeGroupsAddNodesRequest', + 'NodeGroupsDeleteNodesRequest', + 'NodeGroupsListNodes', + 
'NodeGroupsScopedList', + 'NodeGroupsSetNodeTemplateRequest', + 'NodeGroupsSimulateMaintenanceEventRequest', + 'NodeTemplate', + 'NodeTemplateAggregatedList', + 'NodeTemplateList', + 'NodeTemplateNodeTypeFlexibility', + 'NodeTemplatesScopedList', + 'NodeType', + 'NodeTypeAggregatedList', + 'NodeTypeList', + 'NodeTypesScopedList', + 'NotificationEndpoint', + 'NotificationEndpointGrpcSettings', + 'NotificationEndpointList', + 'Operation', + 'OperationAggregatedList', + 'OperationList', + 'OperationsScopedList', + 'OutlierDetection', + 'PacketIntervals', + 'PacketMirroring', + 'PacketMirroringAggregatedList', + 'PacketMirroringFilter', + 'PacketMirroringForwardingRuleInfo', + 'PacketMirroringList', + 'PacketMirroringMirroredResourceInfo', + 'PacketMirroringMirroredResourceInfoInstanceInfo', + 'PacketMirroringMirroredResourceInfoSubnetInfo', + 'PacketMirroringNetworkInfo', + 'PacketMirroringsScopedList', + 'PatchAutoscalerRequest', + 'PatchBackendBucketRequest', + 'PatchBackendServiceRequest', + 'PatchFirewallPolicyRequest', + 'PatchFirewallRequest', + 'PatchForwardingRuleRequest', + 'PatchGlobalForwardingRuleRequest', + 'PatchGlobalPublicDelegatedPrefixeRequest', + 'PatchHealthCheckRequest', + 'PatchImageRequest', + 'PatchInstanceGroupManagerRequest', + 'PatchInterconnectAttachmentRequest', + 'PatchInterconnectRequest', + 'PatchNetworkEdgeSecurityServiceRequest', + 'PatchNetworkFirewallPolicyRequest', + 'PatchNetworkRequest', + 'PatchNodeGroupRequest', + 'PatchPacketMirroringRequest', + 'PatchPerInstanceConfigsInstanceGroupManagerRequest', + 'PatchPerInstanceConfigsRegionInstanceGroupManagerRequest', + 'PatchPublicAdvertisedPrefixeRequest', + 'PatchPublicDelegatedPrefixeRequest', + 'PatchRegionAutoscalerRequest', + 'PatchRegionBackendServiceRequest', + 'PatchRegionHealthCheckRequest', + 'PatchRegionHealthCheckServiceRequest', + 'PatchRegionInstanceGroupManagerRequest', + 'PatchRegionNetworkFirewallPolicyRequest', + 'PatchRegionSecurityPolicyRequest', + 
'PatchRegionSslPolicyRequest', + 'PatchRegionTargetHttpsProxyRequest', + 'PatchRegionUrlMapRequest', + 'PatchResourcePolicyRequest', + 'PatchRouterRequest', + 'PatchRuleFirewallPolicyRequest', + 'PatchRuleNetworkFirewallPolicyRequest', + 'PatchRuleRegionNetworkFirewallPolicyRequest', + 'PatchRuleSecurityPolicyRequest', + 'PatchSecurityPolicyRequest', + 'PatchServiceAttachmentRequest', + 'PatchSslPolicyRequest', + 'PatchSubnetworkRequest', + 'PatchTargetGrpcProxyRequest', + 'PatchTargetHttpProxyRequest', + 'PatchTargetHttpsProxyRequest', + 'PatchUrlMapRequest', + 'PathMatcher', + 'PathRule', + 'PerInstanceConfig', + 'Policy', + 'PreconfiguredWafSet', + 'PreservedState', + 'PreservedStatePreservedDisk', + 'PreviewRouterRequest', + 'Project', + 'ProjectsDisableXpnResourceRequest', + 'ProjectsEnableXpnResourceRequest', + 'ProjectsGetXpnResources', + 'ProjectsListXpnHostsRequest', + 'ProjectsSetDefaultNetworkTierRequest', + 'PublicAdvertisedPrefix', + 'PublicAdvertisedPrefixList', + 'PublicAdvertisedPrefixPublicDelegatedPrefix', + 'PublicDelegatedPrefix', + 'PublicDelegatedPrefixAggregatedList', + 'PublicDelegatedPrefixesScopedList', + 'PublicDelegatedPrefixList', + 'PublicDelegatedPrefixPublicDelegatedSubPrefix', + 'Quota', + 'QuotaExceededInfo', + 'RawDisk', + 'RecreateInstancesInstanceGroupManagerRequest', + 'RecreateInstancesRegionInstanceGroupManagerRequest', + 'Reference', + 'Region', + 'RegionAddressesMoveRequest', + 'RegionAutoscalerList', + 'RegionDisksAddResourcePoliciesRequest', + 'RegionDisksRemoveResourcePoliciesRequest', + 'RegionDisksResizeRequest', + 'RegionDisksStartAsyncReplicationRequest', + 'RegionDiskTypeList', + 'RegionInstanceGroupList', + 'RegionInstanceGroupManagerDeleteInstanceConfigReq', + 'RegionInstanceGroupManagerList', + 'RegionInstanceGroupManagerPatchInstanceConfigReq', + 'RegionInstanceGroupManagersAbandonInstancesRequest', + 'RegionInstanceGroupManagersApplyUpdatesRequest', + 'RegionInstanceGroupManagersCreateInstancesRequest', + 
'RegionInstanceGroupManagersDeleteInstancesRequest', + 'RegionInstanceGroupManagersListErrorsResponse', + 'RegionInstanceGroupManagersListInstanceConfigsResp', + 'RegionInstanceGroupManagersListInstancesResponse', + 'RegionInstanceGroupManagersRecreateRequest', + 'RegionInstanceGroupManagersSetTargetPoolsRequest', + 'RegionInstanceGroupManagersSetTemplateRequest', + 'RegionInstanceGroupManagerUpdateInstanceConfigReq', + 'RegionInstanceGroupsListInstances', + 'RegionInstanceGroupsListInstancesRequest', + 'RegionInstanceGroupsSetNamedPortsRequest', + 'RegionList', + 'RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse', + 'RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'RegionSetLabelsRequest', + 'RegionSetPolicyRequest', + 'RegionTargetHttpsProxiesSetSslCertificatesRequest', + 'RegionUrlMapsValidateRequest', + 'RemoveAssociationFirewallPolicyRequest', + 'RemoveAssociationNetworkFirewallPolicyRequest', + 'RemoveAssociationRegionNetworkFirewallPolicyRequest', + 'RemoveHealthCheckTargetPoolRequest', + 'RemoveInstancesInstanceGroupRequest', + 'RemoveInstanceTargetPoolRequest', + 'RemovePeeringNetworkRequest', + 'RemoveResourcePoliciesDiskRequest', + 'RemoveResourcePoliciesInstanceRequest', + 'RemoveResourcePoliciesRegionDiskRequest', + 'RemoveRuleFirewallPolicyRequest', + 'RemoveRuleNetworkFirewallPolicyRequest', + 'RemoveRuleRegionNetworkFirewallPolicyRequest', + 'RemoveRuleSecurityPolicyRequest', + 'RequestMirrorPolicy', + 'Reservation', + 'ReservationAffinity', + 'ReservationAggregatedList', + 'ReservationList', + 'ReservationsResizeRequest', + 'ReservationsScopedList', + 'ResetInstanceRequest', + 'ResizeDiskRequest', + 'ResizeInstanceGroupManagerRequest', + 'ResizeRegionDiskRequest', + 'ResizeRegionInstanceGroupManagerRequest', + 'ResizeReservationRequest', + 'ResourceCommitment', + 'ResourceGroupReference', + 'ResourcePoliciesScopedList', + 'ResourcePolicy', + 'ResourcePolicyAggregatedList', + 
'ResourcePolicyDailyCycle', + 'ResourcePolicyDiskConsistencyGroupPolicy', + 'ResourcePolicyGroupPlacementPolicy', + 'ResourcePolicyHourlyCycle', + 'ResourcePolicyInstanceSchedulePolicy', + 'ResourcePolicyInstanceSchedulePolicySchedule', + 'ResourcePolicyList', + 'ResourcePolicyResourceStatus', + 'ResourcePolicyResourceStatusInstanceSchedulePolicyStatus', + 'ResourcePolicySnapshotSchedulePolicy', + 'ResourcePolicySnapshotSchedulePolicyRetentionPolicy', + 'ResourcePolicySnapshotSchedulePolicySchedule', + 'ResourcePolicySnapshotSchedulePolicySnapshotProperties', + 'ResourcePolicyWeeklyCycle', + 'ResourcePolicyWeeklyCycleDayOfWeek', + 'ResourceStatus', + 'ResumeInstanceRequest', + 'Route', + 'RouteAsPath', + 'RouteList', + 'Router', + 'RouterAdvertisedIpRange', + 'RouterAggregatedList', + 'RouterBgp', + 'RouterBgpPeer', + 'RouterBgpPeerBfd', + 'RouterBgpPeerCustomLearnedIpRange', + 'RouterInterface', + 'RouterList', + 'RouterMd5AuthenticationKey', + 'RouterNat', + 'RouterNatLogConfig', + 'RouterNatRule', + 'RouterNatRuleAction', + 'RouterNatSubnetworkToNat', + 'RoutersPreviewResponse', + 'RoutersScopedList', + 'RouterStatus', + 'RouterStatusBgpPeerStatus', + 'RouterStatusNatStatus', + 'RouterStatusNatStatusNatRuleStatus', + 'RouterStatusResponse', + 'Rule', + 'SavedAttachedDisk', + 'SavedDisk', + 'ScalingScheduleStatus', + 'Scheduling', + 'SchedulingNodeAffinity', + 'ScratchDisks', + 'Screenshot', + 'SecurityPoliciesAggregatedList', + 'SecurityPoliciesListPreconfiguredExpressionSetsResponse', + 'SecurityPoliciesScopedList', + 'SecurityPoliciesWafConfig', + 'SecurityPolicy', + 'SecurityPolicyAdaptiveProtectionConfig', + 'SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig', + 'SecurityPolicyAdvancedOptionsConfig', + 'SecurityPolicyAdvancedOptionsConfigJsonCustomConfig', + 'SecurityPolicyDdosProtectionConfig', + 'SecurityPolicyList', + 'SecurityPolicyRecaptchaOptionsConfig', + 'SecurityPolicyReference', + 'SecurityPolicyRule', + 
'SecurityPolicyRuleHttpHeaderAction', + 'SecurityPolicyRuleHttpHeaderActionHttpHeaderOption', + 'SecurityPolicyRuleMatcher', + 'SecurityPolicyRuleMatcherConfig', + 'SecurityPolicyRulePreconfiguredWafConfig', + 'SecurityPolicyRulePreconfiguredWafConfigExclusion', + 'SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams', + 'SecurityPolicyRuleRateLimitOptions', + 'SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig', + 'SecurityPolicyRuleRateLimitOptionsThreshold', + 'SecurityPolicyRuleRedirectOptions', + 'SecuritySettings', + 'SendDiagnosticInterruptInstanceRequest', + 'SendDiagnosticInterruptInstanceResponse', + 'SerialPortOutput', + 'ServerBinding', + 'ServiceAccount', + 'ServiceAttachment', + 'ServiceAttachmentAggregatedList', + 'ServiceAttachmentConnectedEndpoint', + 'ServiceAttachmentConsumerProjectLimit', + 'ServiceAttachmentList', + 'ServiceAttachmentsScopedList', + 'SetBackendServiceTargetSslProxyRequest', + 'SetBackendServiceTargetTcpProxyRequest', + 'SetBackupTargetPoolRequest', + 'SetCertificateMapTargetHttpsProxyRequest', + 'SetCertificateMapTargetSslProxyRequest', + 'SetCommonInstanceMetadataProjectRequest', + 'SetDefaultNetworkTierProjectRequest', + 'SetDeletionProtectionInstanceRequest', + 'SetDiskAutoDeleteInstanceRequest', + 'SetEdgeSecurityPolicyBackendBucketRequest', + 'SetEdgeSecurityPolicyBackendServiceRequest', + 'SetIamPolicyBackendServiceRequest', + 'SetIamPolicyDiskRequest', + 'SetIamPolicyFirewallPolicyRequest', + 'SetIamPolicyImageRequest', + 'SetIamPolicyInstanceRequest', + 'SetIamPolicyInstanceTemplateRequest', + 'SetIamPolicyLicenseRequest', + 'SetIamPolicyMachineImageRequest', + 'SetIamPolicyNetworkAttachmentRequest', + 'SetIamPolicyNetworkFirewallPolicyRequest', + 'SetIamPolicyNodeGroupRequest', + 'SetIamPolicyNodeTemplateRequest', + 'SetIamPolicyRegionBackendServiceRequest', + 'SetIamPolicyRegionDiskRequest', + 'SetIamPolicyRegionNetworkFirewallPolicyRequest', + 'SetIamPolicyReservationRequest', + 
'SetIamPolicyResourcePolicyRequest', + 'SetIamPolicyServiceAttachmentRequest', + 'SetIamPolicySnapshotRequest', + 'SetIamPolicySubnetworkRequest', + 'SetInstanceTemplateInstanceGroupManagerRequest', + 'SetInstanceTemplateRegionInstanceGroupManagerRequest', + 'SetLabelsAddressRequest', + 'SetLabelsDiskRequest', + 'SetLabelsExternalVpnGatewayRequest', + 'SetLabelsForwardingRuleRequest', + 'SetLabelsGlobalAddressRequest', + 'SetLabelsGlobalForwardingRuleRequest', + 'SetLabelsImageRequest', + 'SetLabelsInstanceRequest', + 'SetLabelsInterconnectAttachmentRequest', + 'SetLabelsInterconnectRequest', + 'SetLabelsRegionDiskRequest', + 'SetLabelsSecurityPolicyRequest', + 'SetLabelsSnapshotRequest', + 'SetLabelsTargetVpnGatewayRequest', + 'SetLabelsVpnGatewayRequest', + 'SetLabelsVpnTunnelRequest', + 'SetMachineResourcesInstanceRequest', + 'SetMachineTypeInstanceRequest', + 'SetMetadataInstanceRequest', + 'SetMinCpuPlatformInstanceRequest', + 'SetNamedPortsInstanceGroupRequest', + 'SetNamedPortsRegionInstanceGroupRequest', + 'SetNameInstanceRequest', + 'SetNodeTemplateNodeGroupRequest', + 'SetPrivateIpGoogleAccessSubnetworkRequest', + 'SetProxyHeaderTargetSslProxyRequest', + 'SetProxyHeaderTargetTcpProxyRequest', + 'SetQuicOverrideTargetHttpsProxyRequest', + 'SetSchedulingInstanceRequest', + 'SetSecurityPolicyBackendServiceRequest', + 'SetServiceAccountInstanceRequest', + 'SetShieldedInstanceIntegrityPolicyInstanceRequest', + 'SetSslCertificatesRegionTargetHttpsProxyRequest', + 'SetSslCertificatesTargetHttpsProxyRequest', + 'SetSslCertificatesTargetSslProxyRequest', + 'SetSslPolicyTargetHttpsProxyRequest', + 'SetSslPolicyTargetSslProxyRequest', + 'SetTagsInstanceRequest', + 'SetTargetForwardingRuleRequest', + 'SetTargetGlobalForwardingRuleRequest', + 'SetTargetPoolsInstanceGroupManagerRequest', + 'SetTargetPoolsRegionInstanceGroupManagerRequest', + 'SetUrlMapRegionTargetHttpProxyRequest', + 'SetUrlMapRegionTargetHttpsProxyRequest', + 'SetUrlMapTargetHttpProxyRequest', + 
'SetUrlMapTargetHttpsProxyRequest', + 'SetUsageExportBucketProjectRequest', + 'ShareSettings', + 'ShareSettingsProjectConfig', + 'ShieldedInstanceConfig', + 'ShieldedInstanceIdentity', + 'ShieldedInstanceIdentityEntry', + 'ShieldedInstanceIntegrityPolicy', + 'SignedUrlKey', + 'SimulateMaintenanceEventInstanceRequest', + 'SimulateMaintenanceEventNodeGroupRequest', + 'Snapshot', + 'SnapshotList', + 'SourceDiskEncryptionKey', + 'SourceInstanceParams', + 'SourceInstanceProperties', + 'SslCertificate', + 'SslCertificateAggregatedList', + 'SslCertificateList', + 'SslCertificateManagedSslCertificate', + 'SslCertificateSelfManagedSslCertificate', + 'SslCertificatesScopedList', + 'SSLHealthCheck', + 'SslPoliciesAggregatedList', + 'SslPoliciesList', + 'SslPoliciesListAvailableFeaturesResponse', + 'SslPoliciesScopedList', + 'SslPolicy', + 'SslPolicyReference', + 'StartAsyncReplicationDiskRequest', + 'StartAsyncReplicationRegionDiskRequest', + 'StartInstanceRequest', + 'StartWithEncryptionKeyInstanceRequest', + 'StatefulPolicy', + 'StatefulPolicyPreservedState', + 'StatefulPolicyPreservedStateDiskDevice', + 'StopAsyncReplicationDiskRequest', + 'StopAsyncReplicationRegionDiskRequest', + 'StopGroupAsyncReplicationDiskRequest', + 'StopGroupAsyncReplicationRegionDiskRequest', + 'StopInstanceRequest', + 'Subnetwork', + 'SubnetworkAggregatedList', + 'SubnetworkList', + 'SubnetworkLogConfig', + 'SubnetworkSecondaryRange', + 'SubnetworksExpandIpCidrRangeRequest', + 'SubnetworksScopedList', + 'SubnetworksSetPrivateIpGoogleAccessRequest', + 'Subsetting', + 'SuspendInstanceRequest', + 'SwitchToCustomModeNetworkRequest', + 'Tags', + 'TargetGrpcProxy', + 'TargetGrpcProxyList', + 'TargetHttpProxiesScopedList', + 'TargetHttpProxy', + 'TargetHttpProxyAggregatedList', + 'TargetHttpProxyList', + 'TargetHttpsProxiesScopedList', + 'TargetHttpsProxiesSetCertificateMapRequest', + 'TargetHttpsProxiesSetQuicOverrideRequest', + 'TargetHttpsProxiesSetSslCertificatesRequest', + 'TargetHttpsProxy', + 
'TargetHttpsProxyAggregatedList', + 'TargetHttpsProxyList', + 'TargetInstance', + 'TargetInstanceAggregatedList', + 'TargetInstanceList', + 'TargetInstancesScopedList', + 'TargetPool', + 'TargetPoolAggregatedList', + 'TargetPoolInstanceHealth', + 'TargetPoolList', + 'TargetPoolsAddHealthCheckRequest', + 'TargetPoolsAddInstanceRequest', + 'TargetPoolsRemoveHealthCheckRequest', + 'TargetPoolsRemoveInstanceRequest', + 'TargetPoolsScopedList', + 'TargetReference', + 'TargetSslProxiesSetBackendServiceRequest', + 'TargetSslProxiesSetCertificateMapRequest', + 'TargetSslProxiesSetProxyHeaderRequest', + 'TargetSslProxiesSetSslCertificatesRequest', + 'TargetSslProxy', + 'TargetSslProxyList', + 'TargetTcpProxiesScopedList', + 'TargetTcpProxiesSetBackendServiceRequest', + 'TargetTcpProxiesSetProxyHeaderRequest', + 'TargetTcpProxy', + 'TargetTcpProxyAggregatedList', + 'TargetTcpProxyList', + 'TargetVpnGateway', + 'TargetVpnGatewayAggregatedList', + 'TargetVpnGatewayList', + 'TargetVpnGatewaysScopedList', + 'TCPHealthCheck', + 'TestFailure', + 'TestIamPermissionsDiskRequest', + 'TestIamPermissionsExternalVpnGatewayRequest', + 'TestIamPermissionsFirewallPolicyRequest', + 'TestIamPermissionsImageRequest', + 'TestIamPermissionsInstanceRequest', + 'TestIamPermissionsInstanceTemplateRequest', + 'TestIamPermissionsLicenseCodeRequest', + 'TestIamPermissionsLicenseRequest', + 'TestIamPermissionsMachineImageRequest', + 'TestIamPermissionsNetworkAttachmentRequest', + 'TestIamPermissionsNetworkEndpointGroupRequest', + 'TestIamPermissionsNetworkFirewallPolicyRequest', + 'TestIamPermissionsNodeGroupRequest', + 'TestIamPermissionsNodeTemplateRequest', + 'TestIamPermissionsPacketMirroringRequest', + 'TestIamPermissionsRegionDiskRequest', + 'TestIamPermissionsRegionNetworkFirewallPolicyRequest', + 'TestIamPermissionsReservationRequest', + 'TestIamPermissionsResourcePolicyRequest', + 'TestIamPermissionsServiceAttachmentRequest', + 'TestIamPermissionsSnapshotRequest', + 
'TestIamPermissionsSubnetworkRequest', + 'TestIamPermissionsVpnGatewayRequest', + 'TestPermissionsRequest', + 'TestPermissionsResponse', + 'Uint128', + 'UpdateAccessConfigInstanceRequest', + 'UpdateAutoscalerRequest', + 'UpdateBackendBucketRequest', + 'UpdateBackendServiceRequest', + 'UpdateDiskRequest', + 'UpdateDisplayDeviceInstanceRequest', + 'UpdateFirewallRequest', + 'UpdateHealthCheckRequest', + 'UpdateInstanceRequest', + 'UpdateNetworkInterfaceInstanceRequest', + 'UpdatePeeringNetworkRequest', + 'UpdatePerInstanceConfigsInstanceGroupManagerRequest', + 'UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest', + 'UpdateRegionAutoscalerRequest', + 'UpdateRegionBackendServiceRequest', + 'UpdateRegionCommitmentRequest', + 'UpdateRegionDiskRequest', + 'UpdateRegionHealthCheckRequest', + 'UpdateRegionUrlMapRequest', + 'UpdateReservationRequest', + 'UpdateRouterRequest', + 'UpdateShieldedInstanceConfigInstanceRequest', + 'UpdateUrlMapRequest', + 'UrlMap', + 'UrlMapList', + 'UrlMapReference', + 'UrlMapsAggregatedList', + 'UrlMapsScopedList', + 'UrlMapsValidateRequest', + 'UrlMapsValidateResponse', + 'UrlMapTest', + 'UrlMapTestHeader', + 'UrlMapValidationResult', + 'UrlRewrite', + 'UsableSubnetwork', + 'UsableSubnetworksAggregatedList', + 'UsableSubnetworkSecondaryRange', + 'UsageExportLocation', + 'ValidateRegionUrlMapRequest', + 'ValidateUrlMapRequest', + 'VmEndpointNatMappings', + 'VmEndpointNatMappingsInterfaceNatMappings', + 'VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings', + 'VmEndpointNatMappingsList', + 'VpnGateway', + 'VpnGatewayAggregatedList', + 'VpnGatewayList', + 'VpnGatewaysGetStatusResponse', + 'VpnGatewaysScopedList', + 'VpnGatewayStatus', + 'VpnGatewayStatusHighAvailabilityRequirementState', + 'VpnGatewayStatusTunnel', + 'VpnGatewayStatusVpnConnection', + 'VpnGatewayVpnGatewayInterface', + 'VpnTunnel', + 'VpnTunnelAggregatedList', + 'VpnTunnelList', + 'VpnTunnelsScopedList', + 'WafExpressionSet', + 'WafExpressionSetExpression', + 
'WaitGlobalOperationRequest', + 'WaitRegionOperationRequest', + 'WaitZoneOperationRequest', + 'Warning', + 'Warnings', + 'WeightedBackendService', + 'XpnHostList', + 'XpnResourceId', + 'Zone', + 'ZoneList', + 'ZoneSetLabelsRequest', + 'ZoneSetPolicyRequest', +) diff --git a/owl-bot-staging/v1/google/cloud/compute_v1/types/compute.py b/owl-bot-staging/v1/google/cloud/compute_v1/types/compute.py new file mode 100644 index 000000000..ea027d88c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/compute_v1/types/compute.py @@ -0,0 +1,99653 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.compute.v1', + manifest={ + 'AbandonInstancesInstanceGroupManagerRequest', + 'AbandonInstancesRegionInstanceGroupManagerRequest', + 'AcceleratorConfig', + 'AcceleratorType', + 'AcceleratorTypeAggregatedList', + 'AcceleratorTypeList', + 'AcceleratorTypesScopedList', + 'Accelerators', + 'AccessConfig', + 'AddAccessConfigInstanceRequest', + 'AddAssociationFirewallPolicyRequest', + 'AddAssociationNetworkFirewallPolicyRequest', + 'AddAssociationRegionNetworkFirewallPolicyRequest', + 'AddHealthCheckTargetPoolRequest', + 'AddInstanceTargetPoolRequest', + 'AddInstancesInstanceGroupRequest', + 'AddNodesNodeGroupRequest', + 'AddPeeringNetworkRequest', + 'AddResourcePoliciesDiskRequest', + 'AddResourcePoliciesInstanceRequest', + 'AddResourcePoliciesRegionDiskRequest', + 'AddRuleFirewallPolicyRequest', + 'AddRuleNetworkFirewallPolicyRequest', + 'AddRuleRegionNetworkFirewallPolicyRequest', + 'AddRuleSecurityPolicyRequest', + 'AddSignedUrlKeyBackendBucketRequest', + 'AddSignedUrlKeyBackendServiceRequest', + 'Address', + 'AddressAggregatedList', + 'AddressList', + 'AddressesScopedList', + 'AdvancedMachineFeatures', + 'AggregatedListAcceleratorTypesRequest', + 'AggregatedListAddressesRequest', + 'AggregatedListAutoscalersRequest', + 'AggregatedListBackendServicesRequest', + 'AggregatedListDiskTypesRequest', + 'AggregatedListDisksRequest', + 'AggregatedListForwardingRulesRequest', + 'AggregatedListGlobalOperationsRequest', + 'AggregatedListHealthChecksRequest', + 'AggregatedListInstanceGroupManagersRequest', + 'AggregatedListInstanceGroupsRequest', + 'AggregatedListInstanceTemplatesRequest', + 'AggregatedListInstancesRequest', + 'AggregatedListInterconnectAttachmentsRequest', + 'AggregatedListMachineTypesRequest', + 'AggregatedListNetworkAttachmentsRequest', + 
'AggregatedListNetworkEdgeSecurityServicesRequest', + 'AggregatedListNetworkEndpointGroupsRequest', + 'AggregatedListNodeGroupsRequest', + 'AggregatedListNodeTemplatesRequest', + 'AggregatedListNodeTypesRequest', + 'AggregatedListPacketMirroringsRequest', + 'AggregatedListPublicDelegatedPrefixesRequest', + 'AggregatedListRegionCommitmentsRequest', + 'AggregatedListReservationsRequest', + 'AggregatedListResourcePoliciesRequest', + 'AggregatedListRoutersRequest', + 'AggregatedListSecurityPoliciesRequest', + 'AggregatedListServiceAttachmentsRequest', + 'AggregatedListSslCertificatesRequest', + 'AggregatedListSslPoliciesRequest', + 'AggregatedListSubnetworksRequest', + 'AggregatedListTargetHttpProxiesRequest', + 'AggregatedListTargetHttpsProxiesRequest', + 'AggregatedListTargetInstancesRequest', + 'AggregatedListTargetPoolsRequest', + 'AggregatedListTargetTcpProxiesRequest', + 'AggregatedListTargetVpnGatewaysRequest', + 'AggregatedListUrlMapsRequest', + 'AggregatedListVpnGatewaysRequest', + 'AggregatedListVpnTunnelsRequest', + 'AliasIpRange', + 'AllocationResourceStatus', + 'AllocationResourceStatusSpecificSKUAllocation', + 'AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk', + 'AllocationSpecificSKUAllocationReservedInstanceProperties', + 'AllocationSpecificSKUReservation', + 'Allowed', + 'ApplyUpdatesToInstancesInstanceGroupManagerRequest', + 'ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest', + 'AttachDiskInstanceRequest', + 'AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest', + 'AttachNetworkEndpointsNetworkEndpointGroupRequest', + 'AttachedDisk', + 'AttachedDiskInitializeParams', + 'AuditConfig', + 'AuditLogConfig', + 'AuthorizationLoggingOptions', + 'Autoscaler', + 'AutoscalerAggregatedList', + 'AutoscalerList', + 'AutoscalerStatusDetails', + 'AutoscalersScopedList', + 'AutoscalingPolicy', + 'AutoscalingPolicyCpuUtilization', + 'AutoscalingPolicyCustomMetricUtilization', + 'AutoscalingPolicyLoadBalancingUtilization', + 
'AutoscalingPolicyScaleInControl', + 'AutoscalingPolicyScalingSchedule', + 'Backend', + 'BackendBucket', + 'BackendBucketCdnPolicy', + 'BackendBucketCdnPolicyBypassCacheOnRequestHeader', + 'BackendBucketCdnPolicyCacheKeyPolicy', + 'BackendBucketCdnPolicyNegativeCachingPolicy', + 'BackendBucketList', + 'BackendService', + 'BackendServiceAggregatedList', + 'BackendServiceCdnPolicy', + 'BackendServiceCdnPolicyBypassCacheOnRequestHeader', + 'BackendServiceCdnPolicyNegativeCachingPolicy', + 'BackendServiceConnectionTrackingPolicy', + 'BackendServiceFailoverPolicy', + 'BackendServiceGroupHealth', + 'BackendServiceIAP', + 'BackendServiceList', + 'BackendServiceLocalityLoadBalancingPolicyConfig', + 'BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy', + 'BackendServiceLocalityLoadBalancingPolicyConfigPolicy', + 'BackendServiceLogConfig', + 'BackendServiceReference', + 'BackendServicesScopedList', + 'BfdPacket', + 'BfdStatus', + 'BfdStatusPacketCounts', + 'Binding', + 'BulkInsertDiskRequest', + 'BulkInsertDiskResource', + 'BulkInsertInstanceRequest', + 'BulkInsertInstanceResource', + 'BulkInsertInstanceResourcePerInstanceProperties', + 'BulkInsertRegionDiskRequest', + 'BulkInsertRegionInstanceRequest', + 'CacheInvalidationRule', + 'CacheKeyPolicy', + 'CircuitBreakers', + 'CloneRulesFirewallPolicyRequest', + 'CloneRulesNetworkFirewallPolicyRequest', + 'CloneRulesRegionNetworkFirewallPolicyRequest', + 'Commitment', + 'CommitmentAggregatedList', + 'CommitmentList', + 'CommitmentsScopedList', + 'Condition', + 'ConfidentialInstanceConfig', + 'ConnectionDraining', + 'ConsistentHashLoadBalancerSettings', + 'ConsistentHashLoadBalancerSettingsHttpCookie', + 'CorsPolicy', + 'CreateInstancesInstanceGroupManagerRequest', + 'CreateInstancesRegionInstanceGroupManagerRequest', + 'CreateSnapshotDiskRequest', + 'CreateSnapshotRegionDiskRequest', + 'CustomerEncryptionKey', + 'CustomerEncryptionKeyProtectedDisk', + 'Data', + 'DeleteAccessConfigInstanceRequest', + 
'DeleteAddressRequest', + 'DeleteAutoscalerRequest', + 'DeleteBackendBucketRequest', + 'DeleteBackendServiceRequest', + 'DeleteDiskRequest', + 'DeleteExternalVpnGatewayRequest', + 'DeleteFirewallPolicyRequest', + 'DeleteFirewallRequest', + 'DeleteForwardingRuleRequest', + 'DeleteGlobalAddressRequest', + 'DeleteGlobalForwardingRuleRequest', + 'DeleteGlobalNetworkEndpointGroupRequest', + 'DeleteGlobalOperationRequest', + 'DeleteGlobalOperationResponse', + 'DeleteGlobalOrganizationOperationRequest', + 'DeleteGlobalOrganizationOperationResponse', + 'DeleteGlobalPublicDelegatedPrefixeRequest', + 'DeleteHealthCheckRequest', + 'DeleteImageRequest', + 'DeleteInstanceGroupManagerRequest', + 'DeleteInstanceGroupRequest', + 'DeleteInstanceRequest', + 'DeleteInstanceTemplateRequest', + 'DeleteInstancesInstanceGroupManagerRequest', + 'DeleteInstancesRegionInstanceGroupManagerRequest', + 'DeleteInterconnectAttachmentRequest', + 'DeleteInterconnectRequest', + 'DeleteLicenseRequest', + 'DeleteMachineImageRequest', + 'DeleteNetworkAttachmentRequest', + 'DeleteNetworkEdgeSecurityServiceRequest', + 'DeleteNetworkEndpointGroupRequest', + 'DeleteNetworkFirewallPolicyRequest', + 'DeleteNetworkRequest', + 'DeleteNodeGroupRequest', + 'DeleteNodeTemplateRequest', + 'DeleteNodesNodeGroupRequest', + 'DeletePacketMirroringRequest', + 'DeletePerInstanceConfigsInstanceGroupManagerRequest', + 'DeletePerInstanceConfigsRegionInstanceGroupManagerRequest', + 'DeletePublicAdvertisedPrefixeRequest', + 'DeletePublicDelegatedPrefixeRequest', + 'DeleteRegionAutoscalerRequest', + 'DeleteRegionBackendServiceRequest', + 'DeleteRegionDiskRequest', + 'DeleteRegionHealthCheckRequest', + 'DeleteRegionHealthCheckServiceRequest', + 'DeleteRegionInstanceGroupManagerRequest', + 'DeleteRegionInstanceTemplateRequest', + 'DeleteRegionNetworkEndpointGroupRequest', + 'DeleteRegionNetworkFirewallPolicyRequest', + 'DeleteRegionNotificationEndpointRequest', + 'DeleteRegionOperationRequest', + 
'DeleteRegionOperationResponse', + 'DeleteRegionSecurityPolicyRequest', + 'DeleteRegionSslCertificateRequest', + 'DeleteRegionSslPolicyRequest', + 'DeleteRegionTargetHttpProxyRequest', + 'DeleteRegionTargetHttpsProxyRequest', + 'DeleteRegionTargetTcpProxyRequest', + 'DeleteRegionUrlMapRequest', + 'DeleteReservationRequest', + 'DeleteResourcePolicyRequest', + 'DeleteRouteRequest', + 'DeleteRouterRequest', + 'DeleteSecurityPolicyRequest', + 'DeleteServiceAttachmentRequest', + 'DeleteSignedUrlKeyBackendBucketRequest', + 'DeleteSignedUrlKeyBackendServiceRequest', + 'DeleteSnapshotRequest', + 'DeleteSslCertificateRequest', + 'DeleteSslPolicyRequest', + 'DeleteSubnetworkRequest', + 'DeleteTargetGrpcProxyRequest', + 'DeleteTargetHttpProxyRequest', + 'DeleteTargetHttpsProxyRequest', + 'DeleteTargetInstanceRequest', + 'DeleteTargetPoolRequest', + 'DeleteTargetSslProxyRequest', + 'DeleteTargetTcpProxyRequest', + 'DeleteTargetVpnGatewayRequest', + 'DeleteUrlMapRequest', + 'DeleteVpnGatewayRequest', + 'DeleteVpnTunnelRequest', + 'DeleteZoneOperationRequest', + 'DeleteZoneOperationResponse', + 'Denied', + 'DeprecateImageRequest', + 'DeprecationStatus', + 'DetachDiskInstanceRequest', + 'DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest', + 'DetachNetworkEndpointsNetworkEndpointGroupRequest', + 'DisableXpnHostProjectRequest', + 'DisableXpnResourceProjectRequest', + 'Disk', + 'DiskAggregatedList', + 'DiskAsyncReplication', + 'DiskAsyncReplicationList', + 'DiskInstantiationConfig', + 'DiskList', + 'DiskMoveRequest', + 'DiskParams', + 'DiskResourceStatus', + 'DiskResourceStatusAsyncReplicationStatus', + 'DiskType', + 'DiskTypeAggregatedList', + 'DiskTypeList', + 'DiskTypesScopedList', + 'DisksAddResourcePoliciesRequest', + 'DisksRemoveResourcePoliciesRequest', + 'DisksResizeRequest', + 'DisksScopedList', + 'DisksStartAsyncReplicationRequest', + 'DisksStopGroupAsyncReplicationResource', + 'DisplayDevice', + 'DistributionPolicy', + 'DistributionPolicyZoneConfiguration', + 
'Duration', + 'EnableXpnHostProjectRequest', + 'EnableXpnResourceProjectRequest', + 'Error', + 'ErrorDetails', + 'ErrorInfo', + 'Errors', + 'ExchangedPeeringRoute', + 'ExchangedPeeringRoutesList', + 'ExpandIpCidrRangeSubnetworkRequest', + 'Expr', + 'ExternalVpnGateway', + 'ExternalVpnGatewayInterface', + 'ExternalVpnGatewayList', + 'FileContentBuffer', + 'Firewall', + 'FirewallList', + 'FirewallLogConfig', + 'FirewallPoliciesListAssociationsResponse', + 'FirewallPolicy', + 'FirewallPolicyAssociation', + 'FirewallPolicyList', + 'FirewallPolicyRule', + 'FirewallPolicyRuleMatcher', + 'FirewallPolicyRuleMatcherLayer4Config', + 'FirewallPolicyRuleSecureTag', + 'FixedOrPercent', + 'ForwardingRule', + 'ForwardingRuleAggregatedList', + 'ForwardingRuleList', + 'ForwardingRuleReference', + 'ForwardingRuleServiceDirectoryRegistration', + 'ForwardingRulesScopedList', + 'GRPCHealthCheck', + 'GetAcceleratorTypeRequest', + 'GetAddressRequest', + 'GetAssociationFirewallPolicyRequest', + 'GetAssociationNetworkFirewallPolicyRequest', + 'GetAssociationRegionNetworkFirewallPolicyRequest', + 'GetAutoscalerRequest', + 'GetBackendBucketRequest', + 'GetBackendServiceRequest', + 'GetDiagnosticsInterconnectRequest', + 'GetDiskRequest', + 'GetDiskTypeRequest', + 'GetEffectiveFirewallsInstanceRequest', + 'GetEffectiveFirewallsNetworkRequest', + 'GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest', + 'GetExternalVpnGatewayRequest', + 'GetFirewallPolicyRequest', + 'GetFirewallRequest', + 'GetForwardingRuleRequest', + 'GetFromFamilyImageRequest', + 'GetGlobalAddressRequest', + 'GetGlobalForwardingRuleRequest', + 'GetGlobalNetworkEndpointGroupRequest', + 'GetGlobalOperationRequest', + 'GetGlobalOrganizationOperationRequest', + 'GetGlobalPublicDelegatedPrefixeRequest', + 'GetGuestAttributesInstanceRequest', + 'GetHealthBackendServiceRequest', + 'GetHealthCheckRequest', + 'GetHealthRegionBackendServiceRequest', + 'GetHealthTargetPoolRequest', + 'GetIamPolicyBackendServiceRequest', + 
'GetIamPolicyDiskRequest', + 'GetIamPolicyFirewallPolicyRequest', + 'GetIamPolicyImageRequest', + 'GetIamPolicyInstanceRequest', + 'GetIamPolicyInstanceTemplateRequest', + 'GetIamPolicyLicenseRequest', + 'GetIamPolicyMachineImageRequest', + 'GetIamPolicyNetworkAttachmentRequest', + 'GetIamPolicyNetworkFirewallPolicyRequest', + 'GetIamPolicyNodeGroupRequest', + 'GetIamPolicyNodeTemplateRequest', + 'GetIamPolicyRegionBackendServiceRequest', + 'GetIamPolicyRegionDiskRequest', + 'GetIamPolicyRegionNetworkFirewallPolicyRequest', + 'GetIamPolicyReservationRequest', + 'GetIamPolicyResourcePolicyRequest', + 'GetIamPolicyServiceAttachmentRequest', + 'GetIamPolicySnapshotRequest', + 'GetIamPolicySubnetworkRequest', + 'GetImageFamilyViewRequest', + 'GetImageRequest', + 'GetInstanceGroupManagerRequest', + 'GetInstanceGroupRequest', + 'GetInstanceRequest', + 'GetInstanceTemplateRequest', + 'GetInterconnectAttachmentRequest', + 'GetInterconnectLocationRequest', + 'GetInterconnectRemoteLocationRequest', + 'GetInterconnectRequest', + 'GetLicenseCodeRequest', + 'GetLicenseRequest', + 'GetMachineImageRequest', + 'GetMachineTypeRequest', + 'GetNatMappingInfoRoutersRequest', + 'GetNetworkAttachmentRequest', + 'GetNetworkEdgeSecurityServiceRequest', + 'GetNetworkEndpointGroupRequest', + 'GetNetworkFirewallPolicyRequest', + 'GetNetworkRequest', + 'GetNodeGroupRequest', + 'GetNodeTemplateRequest', + 'GetNodeTypeRequest', + 'GetPacketMirroringRequest', + 'GetProjectRequest', + 'GetPublicAdvertisedPrefixeRequest', + 'GetPublicDelegatedPrefixeRequest', + 'GetRegionAutoscalerRequest', + 'GetRegionBackendServiceRequest', + 'GetRegionCommitmentRequest', + 'GetRegionDiskRequest', + 'GetRegionDiskTypeRequest', + 'GetRegionHealthCheckRequest', + 'GetRegionHealthCheckServiceRequest', + 'GetRegionInstanceGroupManagerRequest', + 'GetRegionInstanceGroupRequest', + 'GetRegionInstanceTemplateRequest', + 'GetRegionNetworkEndpointGroupRequest', + 'GetRegionNetworkFirewallPolicyRequest', + 
'GetRegionNotificationEndpointRequest', + 'GetRegionOperationRequest', + 'GetRegionRequest', + 'GetRegionSecurityPolicyRequest', + 'GetRegionSslCertificateRequest', + 'GetRegionSslPolicyRequest', + 'GetRegionTargetHttpProxyRequest', + 'GetRegionTargetHttpsProxyRequest', + 'GetRegionTargetTcpProxyRequest', + 'GetRegionUrlMapRequest', + 'GetReservationRequest', + 'GetResourcePolicyRequest', + 'GetRouteRequest', + 'GetRouterRequest', + 'GetRouterStatusRouterRequest', + 'GetRuleFirewallPolicyRequest', + 'GetRuleNetworkFirewallPolicyRequest', + 'GetRuleRegionNetworkFirewallPolicyRequest', + 'GetRuleSecurityPolicyRequest', + 'GetScreenshotInstanceRequest', + 'GetSecurityPolicyRequest', + 'GetSerialPortOutputInstanceRequest', + 'GetServiceAttachmentRequest', + 'GetShieldedInstanceIdentityInstanceRequest', + 'GetSnapshotRequest', + 'GetSslCertificateRequest', + 'GetSslPolicyRequest', + 'GetStatusVpnGatewayRequest', + 'GetSubnetworkRequest', + 'GetTargetGrpcProxyRequest', + 'GetTargetHttpProxyRequest', + 'GetTargetHttpsProxyRequest', + 'GetTargetInstanceRequest', + 'GetTargetPoolRequest', + 'GetTargetSslProxyRequest', + 'GetTargetTcpProxyRequest', + 'GetTargetVpnGatewayRequest', + 'GetUrlMapRequest', + 'GetVpnGatewayRequest', + 'GetVpnTunnelRequest', + 'GetXpnHostProjectRequest', + 'GetXpnResourcesProjectsRequest', + 'GetZoneOperationRequest', + 'GetZoneRequest', + 'GlobalAddressesMoveRequest', + 'GlobalNetworkEndpointGroupsAttachEndpointsRequest', + 'GlobalNetworkEndpointGroupsDetachEndpointsRequest', + 'GlobalOrganizationSetPolicyRequest', + 'GlobalSetLabelsRequest', + 'GlobalSetPolicyRequest', + 'GuestAttributes', + 'GuestAttributesEntry', + 'GuestAttributesValue', + 'GuestOsFeature', + 'HTTP2HealthCheck', + 'HTTPHealthCheck', + 'HTTPSHealthCheck', + 'HealthCheck', + 'HealthCheckList', + 'HealthCheckLogConfig', + 'HealthCheckReference', + 'HealthCheckService', + 'HealthCheckServiceReference', + 'HealthCheckServicesList', + 'HealthChecksAggregatedList', + 
'HealthChecksScopedList', + 'HealthStatus', + 'HealthStatusForNetworkEndpoint', + 'Help', + 'HelpLink', + 'HostRule', + 'HttpFaultAbort', + 'HttpFaultDelay', + 'HttpFaultInjection', + 'HttpHeaderAction', + 'HttpHeaderMatch', + 'HttpHeaderOption', + 'HttpQueryParameterMatch', + 'HttpRedirectAction', + 'HttpRetryPolicy', + 'HttpRouteAction', + 'HttpRouteRule', + 'HttpRouteRuleMatch', + 'Image', + 'ImageFamilyView', + 'ImageList', + 'InitialStateConfig', + 'InsertAddressRequest', + 'InsertAutoscalerRequest', + 'InsertBackendBucketRequest', + 'InsertBackendServiceRequest', + 'InsertDiskRequest', + 'InsertExternalVpnGatewayRequest', + 'InsertFirewallPolicyRequest', + 'InsertFirewallRequest', + 'InsertForwardingRuleRequest', + 'InsertGlobalAddressRequest', + 'InsertGlobalForwardingRuleRequest', + 'InsertGlobalNetworkEndpointGroupRequest', + 'InsertGlobalPublicDelegatedPrefixeRequest', + 'InsertHealthCheckRequest', + 'InsertImageRequest', + 'InsertInstanceGroupManagerRequest', + 'InsertInstanceGroupRequest', + 'InsertInstanceRequest', + 'InsertInstanceTemplateRequest', + 'InsertInterconnectAttachmentRequest', + 'InsertInterconnectRequest', + 'InsertLicenseRequest', + 'InsertMachineImageRequest', + 'InsertNetworkAttachmentRequest', + 'InsertNetworkEdgeSecurityServiceRequest', + 'InsertNetworkEndpointGroupRequest', + 'InsertNetworkFirewallPolicyRequest', + 'InsertNetworkRequest', + 'InsertNodeGroupRequest', + 'InsertNodeTemplateRequest', + 'InsertPacketMirroringRequest', + 'InsertPublicAdvertisedPrefixeRequest', + 'InsertPublicDelegatedPrefixeRequest', + 'InsertRegionAutoscalerRequest', + 'InsertRegionBackendServiceRequest', + 'InsertRegionCommitmentRequest', + 'InsertRegionDiskRequest', + 'InsertRegionHealthCheckRequest', + 'InsertRegionHealthCheckServiceRequest', + 'InsertRegionInstanceGroupManagerRequest', + 'InsertRegionInstanceTemplateRequest', + 'InsertRegionNetworkEndpointGroupRequest', + 'InsertRegionNetworkFirewallPolicyRequest', + 
'InsertRegionNotificationEndpointRequest', + 'InsertRegionSecurityPolicyRequest', + 'InsertRegionSslCertificateRequest', + 'InsertRegionSslPolicyRequest', + 'InsertRegionTargetHttpProxyRequest', + 'InsertRegionTargetHttpsProxyRequest', + 'InsertRegionTargetTcpProxyRequest', + 'InsertRegionUrlMapRequest', + 'InsertReservationRequest', + 'InsertResourcePolicyRequest', + 'InsertRouteRequest', + 'InsertRouterRequest', + 'InsertSecurityPolicyRequest', + 'InsertServiceAttachmentRequest', + 'InsertSnapshotRequest', + 'InsertSslCertificateRequest', + 'InsertSslPolicyRequest', + 'InsertSubnetworkRequest', + 'InsertTargetGrpcProxyRequest', + 'InsertTargetHttpProxyRequest', + 'InsertTargetHttpsProxyRequest', + 'InsertTargetInstanceRequest', + 'InsertTargetPoolRequest', + 'InsertTargetSslProxyRequest', + 'InsertTargetTcpProxyRequest', + 'InsertTargetVpnGatewayRequest', + 'InsertUrlMapRequest', + 'InsertVpnGatewayRequest', + 'InsertVpnTunnelRequest', + 'Instance', + 'InstanceAggregatedList', + 'InstanceConsumptionData', + 'InstanceConsumptionInfo', + 'InstanceGroup', + 'InstanceGroupAggregatedList', + 'InstanceGroupList', + 'InstanceGroupManager', + 'InstanceGroupManagerActionsSummary', + 'InstanceGroupManagerAggregatedList', + 'InstanceGroupManagerAutoHealingPolicy', + 'InstanceGroupManagerInstanceLifecyclePolicy', + 'InstanceGroupManagerList', + 'InstanceGroupManagerStatus', + 'InstanceGroupManagerStatusStateful', + 'InstanceGroupManagerStatusStatefulPerInstanceConfigs', + 'InstanceGroupManagerStatusVersionTarget', + 'InstanceGroupManagerUpdatePolicy', + 'InstanceGroupManagerVersion', + 'InstanceGroupManagersAbandonInstancesRequest', + 'InstanceGroupManagersApplyUpdatesRequest', + 'InstanceGroupManagersCreateInstancesRequest', + 'InstanceGroupManagersDeleteInstancesRequest', + 'InstanceGroupManagersDeletePerInstanceConfigsReq', + 'InstanceGroupManagersListErrorsResponse', + 'InstanceGroupManagersListManagedInstancesResponse', + 
'InstanceGroupManagersListPerInstanceConfigsResp', + 'InstanceGroupManagersPatchPerInstanceConfigsReq', + 'InstanceGroupManagersRecreateInstancesRequest', + 'InstanceGroupManagersScopedList', + 'InstanceGroupManagersSetInstanceTemplateRequest', + 'InstanceGroupManagersSetTargetPoolsRequest', + 'InstanceGroupManagersUpdatePerInstanceConfigsReq', + 'InstanceGroupsAddInstancesRequest', + 'InstanceGroupsListInstances', + 'InstanceGroupsListInstancesRequest', + 'InstanceGroupsRemoveInstancesRequest', + 'InstanceGroupsScopedList', + 'InstanceGroupsSetNamedPortsRequest', + 'InstanceList', + 'InstanceListReferrers', + 'InstanceManagedByIgmError', + 'InstanceManagedByIgmErrorInstanceActionDetails', + 'InstanceManagedByIgmErrorManagedInstanceError', + 'InstanceMoveRequest', + 'InstanceParams', + 'InstanceProperties', + 'InstanceReference', + 'InstanceTemplate', + 'InstanceTemplateAggregatedList', + 'InstanceTemplateList', + 'InstanceTemplatesScopedList', + 'InstanceWithNamedPorts', + 'InstancesAddResourcePoliciesRequest', + 'InstancesGetEffectiveFirewallsResponse', + 'InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'InstancesRemoveResourcePoliciesRequest', + 'InstancesScopedList', + 'InstancesSetLabelsRequest', + 'InstancesSetMachineResourcesRequest', + 'InstancesSetMachineTypeRequest', + 'InstancesSetMinCpuPlatformRequest', + 'InstancesSetNameRequest', + 'InstancesSetServiceAccountRequest', + 'InstancesStartWithEncryptionKeyRequest', + 'Int64RangeMatch', + 'Interconnect', + 'InterconnectAttachment', + 'InterconnectAttachmentAggregatedList', + 'InterconnectAttachmentConfigurationConstraints', + 'InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange', + 'InterconnectAttachmentList', + 'InterconnectAttachmentPartnerMetadata', + 'InterconnectAttachmentPrivateInfo', + 'InterconnectAttachmentsScopedList', + 'InterconnectCircuitInfo', + 'InterconnectDiagnostics', + 'InterconnectDiagnosticsARPEntry', + 'InterconnectDiagnosticsLinkLACPStatus', + 
'InterconnectDiagnosticsLinkOpticalPower', + 'InterconnectDiagnosticsLinkStatus', + 'InterconnectList', + 'InterconnectLocation', + 'InterconnectLocationList', + 'InterconnectLocationRegionInfo', + 'InterconnectOutageNotification', + 'InterconnectRemoteLocation', + 'InterconnectRemoteLocationConstraints', + 'InterconnectRemoteLocationConstraintsSubnetLengthRange', + 'InterconnectRemoteLocationList', + 'InterconnectRemoteLocationPermittedConnections', + 'InterconnectsGetDiagnosticsResponse', + 'InvalidateCacheUrlMapRequest', + 'Items', + 'License', + 'LicenseCode', + 'LicenseCodeLicenseAlias', + 'LicenseResourceCommitment', + 'LicenseResourceRequirements', + 'LicensesListResponse', + 'ListAcceleratorTypesRequest', + 'ListAddressesRequest', + 'ListAssociationsFirewallPolicyRequest', + 'ListAutoscalersRequest', + 'ListAvailableFeaturesRegionSslPoliciesRequest', + 'ListAvailableFeaturesSslPoliciesRequest', + 'ListBackendBucketsRequest', + 'ListBackendServicesRequest', + 'ListDiskTypesRequest', + 'ListDisksRequest', + 'ListErrorsInstanceGroupManagersRequest', + 'ListErrorsRegionInstanceGroupManagersRequest', + 'ListExternalVpnGatewaysRequest', + 'ListFirewallPoliciesRequest', + 'ListFirewallsRequest', + 'ListForwardingRulesRequest', + 'ListGlobalAddressesRequest', + 'ListGlobalForwardingRulesRequest', + 'ListGlobalNetworkEndpointGroupsRequest', + 'ListGlobalOperationsRequest', + 'ListGlobalOrganizationOperationsRequest', + 'ListGlobalPublicDelegatedPrefixesRequest', + 'ListHealthChecksRequest', + 'ListImagesRequest', + 'ListInstanceGroupManagersRequest', + 'ListInstanceGroupsRequest', + 'ListInstanceTemplatesRequest', + 'ListInstancesInstanceGroupsRequest', + 'ListInstancesRegionInstanceGroupsRequest', + 'ListInstancesRequest', + 'ListInterconnectAttachmentsRequest', + 'ListInterconnectLocationsRequest', + 'ListInterconnectRemoteLocationsRequest', + 'ListInterconnectsRequest', + 'ListLicensesRequest', + 'ListMachineImagesRequest', + 'ListMachineTypesRequest', + 
'ListManagedInstancesInstanceGroupManagersRequest', + 'ListManagedInstancesRegionInstanceGroupManagersRequest', + 'ListNetworkAttachmentsRequest', + 'ListNetworkEndpointGroupsRequest', + 'ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest', + 'ListNetworkEndpointsNetworkEndpointGroupsRequest', + 'ListNetworkFirewallPoliciesRequest', + 'ListNetworksRequest', + 'ListNodeGroupsRequest', + 'ListNodeTemplatesRequest', + 'ListNodeTypesRequest', + 'ListNodesNodeGroupsRequest', + 'ListPacketMirroringsRequest', + 'ListPeeringRoutesNetworksRequest', + 'ListPerInstanceConfigsInstanceGroupManagersRequest', + 'ListPerInstanceConfigsRegionInstanceGroupManagersRequest', + 'ListPreconfiguredExpressionSetsSecurityPoliciesRequest', + 'ListPublicAdvertisedPrefixesRequest', + 'ListPublicDelegatedPrefixesRequest', + 'ListReferrersInstancesRequest', + 'ListRegionAutoscalersRequest', + 'ListRegionBackendServicesRequest', + 'ListRegionCommitmentsRequest', + 'ListRegionDiskTypesRequest', + 'ListRegionDisksRequest', + 'ListRegionHealthCheckServicesRequest', + 'ListRegionHealthChecksRequest', + 'ListRegionInstanceGroupManagersRequest', + 'ListRegionInstanceGroupsRequest', + 'ListRegionInstanceTemplatesRequest', + 'ListRegionNetworkEndpointGroupsRequest', + 'ListRegionNetworkFirewallPoliciesRequest', + 'ListRegionNotificationEndpointsRequest', + 'ListRegionOperationsRequest', + 'ListRegionSecurityPoliciesRequest', + 'ListRegionSslCertificatesRequest', + 'ListRegionSslPoliciesRequest', + 'ListRegionTargetHttpProxiesRequest', + 'ListRegionTargetHttpsProxiesRequest', + 'ListRegionTargetTcpProxiesRequest', + 'ListRegionUrlMapsRequest', + 'ListRegionsRequest', + 'ListReservationsRequest', + 'ListResourcePoliciesRequest', + 'ListRoutersRequest', + 'ListRoutesRequest', + 'ListSecurityPoliciesRequest', + 'ListServiceAttachmentsRequest', + 'ListSnapshotsRequest', + 'ListSslCertificatesRequest', + 'ListSslPoliciesRequest', + 'ListSubnetworksRequest', + 'ListTargetGrpcProxiesRequest', + 
'ListTargetHttpProxiesRequest', + 'ListTargetHttpsProxiesRequest', + 'ListTargetInstancesRequest', + 'ListTargetPoolsRequest', + 'ListTargetSslProxiesRequest', + 'ListTargetTcpProxiesRequest', + 'ListTargetVpnGatewaysRequest', + 'ListUrlMapsRequest', + 'ListUsableSubnetworksRequest', + 'ListVpnGatewaysRequest', + 'ListVpnTunnelsRequest', + 'ListXpnHostsProjectsRequest', + 'ListZoneOperationsRequest', + 'ListZonesRequest', + 'LocalDisk', + 'LocalizedMessage', + 'LocationPolicy', + 'LocationPolicyLocation', + 'LocationPolicyLocationConstraints', + 'LogConfig', + 'LogConfigCloudAuditOptions', + 'LogConfigCounterOptions', + 'LogConfigCounterOptionsCustomField', + 'LogConfigDataAccessOptions', + 'MachineImage', + 'MachineImageList', + 'MachineType', + 'MachineTypeAggregatedList', + 'MachineTypeList', + 'MachineTypesScopedList', + 'ManagedInstance', + 'ManagedInstanceInstanceHealth', + 'ManagedInstanceLastAttempt', + 'ManagedInstanceVersion', + 'Metadata', + 'MetadataFilter', + 'MetadataFilterLabelMatch', + 'MoveAddressRequest', + 'MoveDiskProjectRequest', + 'MoveFirewallPolicyRequest', + 'MoveGlobalAddressRequest', + 'MoveInstanceProjectRequest', + 'NamedPort', + 'Network', + 'NetworkAttachment', + 'NetworkAttachmentAggregatedList', + 'NetworkAttachmentConnectedEndpoint', + 'NetworkAttachmentList', + 'NetworkAttachmentsScopedList', + 'NetworkEdgeSecurityService', + 'NetworkEdgeSecurityServiceAggregatedList', + 'NetworkEdgeSecurityServicesScopedList', + 'NetworkEndpoint', + 'NetworkEndpointGroup', + 'NetworkEndpointGroupAggregatedList', + 'NetworkEndpointGroupAppEngine', + 'NetworkEndpointGroupCloudFunction', + 'NetworkEndpointGroupCloudRun', + 'NetworkEndpointGroupList', + 'NetworkEndpointGroupPscData', + 'NetworkEndpointGroupsAttachEndpointsRequest', + 'NetworkEndpointGroupsDetachEndpointsRequest', + 'NetworkEndpointGroupsListEndpointsRequest', + 'NetworkEndpointGroupsListNetworkEndpoints', + 'NetworkEndpointGroupsScopedList', + 'NetworkEndpointWithHealthStatus', + 
'NetworkInterface', + 'NetworkList', + 'NetworkPeering', + 'NetworkPerformanceConfig', + 'NetworkRoutingConfig', + 'NetworksAddPeeringRequest', + 'NetworksGetEffectiveFirewallsResponse', + 'NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'NetworksRemovePeeringRequest', + 'NetworksUpdatePeeringRequest', + 'NodeGroup', + 'NodeGroupAggregatedList', + 'NodeGroupAutoscalingPolicy', + 'NodeGroupList', + 'NodeGroupMaintenanceWindow', + 'NodeGroupNode', + 'NodeGroupsAddNodesRequest', + 'NodeGroupsDeleteNodesRequest', + 'NodeGroupsListNodes', + 'NodeGroupsScopedList', + 'NodeGroupsSetNodeTemplateRequest', + 'NodeGroupsSimulateMaintenanceEventRequest', + 'NodeTemplate', + 'NodeTemplateAggregatedList', + 'NodeTemplateList', + 'NodeTemplateNodeTypeFlexibility', + 'NodeTemplatesScopedList', + 'NodeType', + 'NodeTypeAggregatedList', + 'NodeTypeList', + 'NodeTypesScopedList', + 'NotificationEndpoint', + 'NotificationEndpointGrpcSettings', + 'NotificationEndpointList', + 'Operation', + 'OperationAggregatedList', + 'OperationList', + 'OperationsScopedList', + 'OutlierDetection', + 'PacketIntervals', + 'PacketMirroring', + 'PacketMirroringAggregatedList', + 'PacketMirroringFilter', + 'PacketMirroringForwardingRuleInfo', + 'PacketMirroringList', + 'PacketMirroringMirroredResourceInfo', + 'PacketMirroringMirroredResourceInfoInstanceInfo', + 'PacketMirroringMirroredResourceInfoSubnetInfo', + 'PacketMirroringNetworkInfo', + 'PacketMirroringsScopedList', + 'PatchAutoscalerRequest', + 'PatchBackendBucketRequest', + 'PatchBackendServiceRequest', + 'PatchFirewallPolicyRequest', + 'PatchFirewallRequest', + 'PatchForwardingRuleRequest', + 'PatchGlobalForwardingRuleRequest', + 'PatchGlobalPublicDelegatedPrefixeRequest', + 'PatchHealthCheckRequest', + 'PatchImageRequest', + 'PatchInstanceGroupManagerRequest', + 'PatchInterconnectAttachmentRequest', + 'PatchInterconnectRequest', + 'PatchNetworkEdgeSecurityServiceRequest', + 'PatchNetworkFirewallPolicyRequest', + 
'PatchNetworkRequest', + 'PatchNodeGroupRequest', + 'PatchPacketMirroringRequest', + 'PatchPerInstanceConfigsInstanceGroupManagerRequest', + 'PatchPerInstanceConfigsRegionInstanceGroupManagerRequest', + 'PatchPublicAdvertisedPrefixeRequest', + 'PatchPublicDelegatedPrefixeRequest', + 'PatchRegionAutoscalerRequest', + 'PatchRegionBackendServiceRequest', + 'PatchRegionHealthCheckRequest', + 'PatchRegionHealthCheckServiceRequest', + 'PatchRegionInstanceGroupManagerRequest', + 'PatchRegionNetworkFirewallPolicyRequest', + 'PatchRegionSecurityPolicyRequest', + 'PatchRegionSslPolicyRequest', + 'PatchRegionTargetHttpsProxyRequest', + 'PatchRegionUrlMapRequest', + 'PatchResourcePolicyRequest', + 'PatchRouterRequest', + 'PatchRuleFirewallPolicyRequest', + 'PatchRuleNetworkFirewallPolicyRequest', + 'PatchRuleRegionNetworkFirewallPolicyRequest', + 'PatchRuleSecurityPolicyRequest', + 'PatchSecurityPolicyRequest', + 'PatchServiceAttachmentRequest', + 'PatchSslPolicyRequest', + 'PatchSubnetworkRequest', + 'PatchTargetGrpcProxyRequest', + 'PatchTargetHttpProxyRequest', + 'PatchTargetHttpsProxyRequest', + 'PatchUrlMapRequest', + 'PathMatcher', + 'PathRule', + 'PerInstanceConfig', + 'Policy', + 'PreconfiguredWafSet', + 'PreservedState', + 'PreservedStatePreservedDisk', + 'PreviewRouterRequest', + 'Project', + 'ProjectsDisableXpnResourceRequest', + 'ProjectsEnableXpnResourceRequest', + 'ProjectsGetXpnResources', + 'ProjectsListXpnHostsRequest', + 'ProjectsSetDefaultNetworkTierRequest', + 'PublicAdvertisedPrefix', + 'PublicAdvertisedPrefixList', + 'PublicAdvertisedPrefixPublicDelegatedPrefix', + 'PublicDelegatedPrefix', + 'PublicDelegatedPrefixAggregatedList', + 'PublicDelegatedPrefixList', + 'PublicDelegatedPrefixPublicDelegatedSubPrefix', + 'PublicDelegatedPrefixesScopedList', + 'Quota', + 'QuotaExceededInfo', + 'RawDisk', + 'RecreateInstancesInstanceGroupManagerRequest', + 'RecreateInstancesRegionInstanceGroupManagerRequest', + 'Reference', + 'Region', + 
'RegionAddressesMoveRequest', + 'RegionAutoscalerList', + 'RegionDiskTypeList', + 'RegionDisksAddResourcePoliciesRequest', + 'RegionDisksRemoveResourcePoliciesRequest', + 'RegionDisksResizeRequest', + 'RegionDisksStartAsyncReplicationRequest', + 'RegionInstanceGroupList', + 'RegionInstanceGroupManagerDeleteInstanceConfigReq', + 'RegionInstanceGroupManagerList', + 'RegionInstanceGroupManagerPatchInstanceConfigReq', + 'RegionInstanceGroupManagerUpdateInstanceConfigReq', + 'RegionInstanceGroupManagersAbandonInstancesRequest', + 'RegionInstanceGroupManagersApplyUpdatesRequest', + 'RegionInstanceGroupManagersCreateInstancesRequest', + 'RegionInstanceGroupManagersDeleteInstancesRequest', + 'RegionInstanceGroupManagersListErrorsResponse', + 'RegionInstanceGroupManagersListInstanceConfigsResp', + 'RegionInstanceGroupManagersListInstancesResponse', + 'RegionInstanceGroupManagersRecreateRequest', + 'RegionInstanceGroupManagersSetTargetPoolsRequest', + 'RegionInstanceGroupManagersSetTemplateRequest', + 'RegionInstanceGroupsListInstances', + 'RegionInstanceGroupsListInstancesRequest', + 'RegionInstanceGroupsSetNamedPortsRequest', + 'RegionList', + 'RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse', + 'RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + 'RegionSetLabelsRequest', + 'RegionSetPolicyRequest', + 'RegionTargetHttpsProxiesSetSslCertificatesRequest', + 'RegionUrlMapsValidateRequest', + 'RemoveAssociationFirewallPolicyRequest', + 'RemoveAssociationNetworkFirewallPolicyRequest', + 'RemoveAssociationRegionNetworkFirewallPolicyRequest', + 'RemoveHealthCheckTargetPoolRequest', + 'RemoveInstanceTargetPoolRequest', + 'RemoveInstancesInstanceGroupRequest', + 'RemovePeeringNetworkRequest', + 'RemoveResourcePoliciesDiskRequest', + 'RemoveResourcePoliciesInstanceRequest', + 'RemoveResourcePoliciesRegionDiskRequest', + 'RemoveRuleFirewallPolicyRequest', + 'RemoveRuleNetworkFirewallPolicyRequest', + 
'RemoveRuleRegionNetworkFirewallPolicyRequest', + 'RemoveRuleSecurityPolicyRequest', + 'RequestMirrorPolicy', + 'Reservation', + 'ReservationAffinity', + 'ReservationAggregatedList', + 'ReservationList', + 'ReservationsResizeRequest', + 'ReservationsScopedList', + 'ResetInstanceRequest', + 'ResizeDiskRequest', + 'ResizeInstanceGroupManagerRequest', + 'ResizeRegionDiskRequest', + 'ResizeRegionInstanceGroupManagerRequest', + 'ResizeReservationRequest', + 'ResourceCommitment', + 'ResourceGroupReference', + 'ResourcePoliciesScopedList', + 'ResourcePolicy', + 'ResourcePolicyAggregatedList', + 'ResourcePolicyDailyCycle', + 'ResourcePolicyDiskConsistencyGroupPolicy', + 'ResourcePolicyGroupPlacementPolicy', + 'ResourcePolicyHourlyCycle', + 'ResourcePolicyInstanceSchedulePolicy', + 'ResourcePolicyInstanceSchedulePolicySchedule', + 'ResourcePolicyList', + 'ResourcePolicyResourceStatus', + 'ResourcePolicyResourceStatusInstanceSchedulePolicyStatus', + 'ResourcePolicySnapshotSchedulePolicy', + 'ResourcePolicySnapshotSchedulePolicyRetentionPolicy', + 'ResourcePolicySnapshotSchedulePolicySchedule', + 'ResourcePolicySnapshotSchedulePolicySnapshotProperties', + 'ResourcePolicyWeeklyCycle', + 'ResourcePolicyWeeklyCycleDayOfWeek', + 'ResourceStatus', + 'ResumeInstanceRequest', + 'Route', + 'RouteAsPath', + 'RouteList', + 'Router', + 'RouterAdvertisedIpRange', + 'RouterAggregatedList', + 'RouterBgp', + 'RouterBgpPeer', + 'RouterBgpPeerBfd', + 'RouterBgpPeerCustomLearnedIpRange', + 'RouterInterface', + 'RouterList', + 'RouterMd5AuthenticationKey', + 'RouterNat', + 'RouterNatLogConfig', + 'RouterNatRule', + 'RouterNatRuleAction', + 'RouterNatSubnetworkToNat', + 'RouterStatus', + 'RouterStatusBgpPeerStatus', + 'RouterStatusNatStatus', + 'RouterStatusNatStatusNatRuleStatus', + 'RouterStatusResponse', + 'RoutersPreviewResponse', + 'RoutersScopedList', + 'Rule', + 'SSLHealthCheck', + 'SavedAttachedDisk', + 'SavedDisk', + 'ScalingScheduleStatus', + 'Scheduling', + 'SchedulingNodeAffinity', + 
'ScratchDisks', + 'Screenshot', + 'SecurityPoliciesAggregatedList', + 'SecurityPoliciesListPreconfiguredExpressionSetsResponse', + 'SecurityPoliciesScopedList', + 'SecurityPoliciesWafConfig', + 'SecurityPolicy', + 'SecurityPolicyAdaptiveProtectionConfig', + 'SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig', + 'SecurityPolicyAdvancedOptionsConfig', + 'SecurityPolicyAdvancedOptionsConfigJsonCustomConfig', + 'SecurityPolicyDdosProtectionConfig', + 'SecurityPolicyList', + 'SecurityPolicyRecaptchaOptionsConfig', + 'SecurityPolicyReference', + 'SecurityPolicyRule', + 'SecurityPolicyRuleHttpHeaderAction', + 'SecurityPolicyRuleHttpHeaderActionHttpHeaderOption', + 'SecurityPolicyRuleMatcher', + 'SecurityPolicyRuleMatcherConfig', + 'SecurityPolicyRulePreconfiguredWafConfig', + 'SecurityPolicyRulePreconfiguredWafConfigExclusion', + 'SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams', + 'SecurityPolicyRuleRateLimitOptions', + 'SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig', + 'SecurityPolicyRuleRateLimitOptionsThreshold', + 'SecurityPolicyRuleRedirectOptions', + 'SecuritySettings', + 'SendDiagnosticInterruptInstanceRequest', + 'SendDiagnosticInterruptInstanceResponse', + 'SerialPortOutput', + 'ServerBinding', + 'ServiceAccount', + 'ServiceAttachment', + 'ServiceAttachmentAggregatedList', + 'ServiceAttachmentConnectedEndpoint', + 'ServiceAttachmentConsumerProjectLimit', + 'ServiceAttachmentList', + 'ServiceAttachmentsScopedList', + 'SetBackendServiceTargetSslProxyRequest', + 'SetBackendServiceTargetTcpProxyRequest', + 'SetBackupTargetPoolRequest', + 'SetCertificateMapTargetHttpsProxyRequest', + 'SetCertificateMapTargetSslProxyRequest', + 'SetCommonInstanceMetadataProjectRequest', + 'SetDefaultNetworkTierProjectRequest', + 'SetDeletionProtectionInstanceRequest', + 'SetDiskAutoDeleteInstanceRequest', + 'SetEdgeSecurityPolicyBackendBucketRequest', + 'SetEdgeSecurityPolicyBackendServiceRequest', + 'SetIamPolicyBackendServiceRequest', + 
'SetIamPolicyDiskRequest', + 'SetIamPolicyFirewallPolicyRequest', + 'SetIamPolicyImageRequest', + 'SetIamPolicyInstanceRequest', + 'SetIamPolicyInstanceTemplateRequest', + 'SetIamPolicyLicenseRequest', + 'SetIamPolicyMachineImageRequest', + 'SetIamPolicyNetworkAttachmentRequest', + 'SetIamPolicyNetworkFirewallPolicyRequest', + 'SetIamPolicyNodeGroupRequest', + 'SetIamPolicyNodeTemplateRequest', + 'SetIamPolicyRegionBackendServiceRequest', + 'SetIamPolicyRegionDiskRequest', + 'SetIamPolicyRegionNetworkFirewallPolicyRequest', + 'SetIamPolicyReservationRequest', + 'SetIamPolicyResourcePolicyRequest', + 'SetIamPolicyServiceAttachmentRequest', + 'SetIamPolicySnapshotRequest', + 'SetIamPolicySubnetworkRequest', + 'SetInstanceTemplateInstanceGroupManagerRequest', + 'SetInstanceTemplateRegionInstanceGroupManagerRequest', + 'SetLabelsAddressRequest', + 'SetLabelsDiskRequest', + 'SetLabelsExternalVpnGatewayRequest', + 'SetLabelsForwardingRuleRequest', + 'SetLabelsGlobalAddressRequest', + 'SetLabelsGlobalForwardingRuleRequest', + 'SetLabelsImageRequest', + 'SetLabelsInstanceRequest', + 'SetLabelsInterconnectAttachmentRequest', + 'SetLabelsInterconnectRequest', + 'SetLabelsRegionDiskRequest', + 'SetLabelsSecurityPolicyRequest', + 'SetLabelsSnapshotRequest', + 'SetLabelsTargetVpnGatewayRequest', + 'SetLabelsVpnGatewayRequest', + 'SetLabelsVpnTunnelRequest', + 'SetMachineResourcesInstanceRequest', + 'SetMachineTypeInstanceRequest', + 'SetMetadataInstanceRequest', + 'SetMinCpuPlatformInstanceRequest', + 'SetNameInstanceRequest', + 'SetNamedPortsInstanceGroupRequest', + 'SetNamedPortsRegionInstanceGroupRequest', + 'SetNodeTemplateNodeGroupRequest', + 'SetPrivateIpGoogleAccessSubnetworkRequest', + 'SetProxyHeaderTargetSslProxyRequest', + 'SetProxyHeaderTargetTcpProxyRequest', + 'SetQuicOverrideTargetHttpsProxyRequest', + 'SetSchedulingInstanceRequest', + 'SetSecurityPolicyBackendServiceRequest', + 'SetServiceAccountInstanceRequest', + 
'SetShieldedInstanceIntegrityPolicyInstanceRequest', + 'SetSslCertificatesRegionTargetHttpsProxyRequest', + 'SetSslCertificatesTargetHttpsProxyRequest', + 'SetSslCertificatesTargetSslProxyRequest', + 'SetSslPolicyTargetHttpsProxyRequest', + 'SetSslPolicyTargetSslProxyRequest', + 'SetTagsInstanceRequest', + 'SetTargetForwardingRuleRequest', + 'SetTargetGlobalForwardingRuleRequest', + 'SetTargetPoolsInstanceGroupManagerRequest', + 'SetTargetPoolsRegionInstanceGroupManagerRequest', + 'SetUrlMapRegionTargetHttpProxyRequest', + 'SetUrlMapRegionTargetHttpsProxyRequest', + 'SetUrlMapTargetHttpProxyRequest', + 'SetUrlMapTargetHttpsProxyRequest', + 'SetUsageExportBucketProjectRequest', + 'ShareSettings', + 'ShareSettingsProjectConfig', + 'ShieldedInstanceConfig', + 'ShieldedInstanceIdentity', + 'ShieldedInstanceIdentityEntry', + 'ShieldedInstanceIntegrityPolicy', + 'SignedUrlKey', + 'SimulateMaintenanceEventInstanceRequest', + 'SimulateMaintenanceEventNodeGroupRequest', + 'Snapshot', + 'SnapshotList', + 'SourceDiskEncryptionKey', + 'SourceInstanceParams', + 'SourceInstanceProperties', + 'SslCertificate', + 'SslCertificateAggregatedList', + 'SslCertificateList', + 'SslCertificateManagedSslCertificate', + 'SslCertificateSelfManagedSslCertificate', + 'SslCertificatesScopedList', + 'SslPoliciesAggregatedList', + 'SslPoliciesList', + 'SslPoliciesListAvailableFeaturesResponse', + 'SslPoliciesScopedList', + 'SslPolicy', + 'SslPolicyReference', + 'StartAsyncReplicationDiskRequest', + 'StartAsyncReplicationRegionDiskRequest', + 'StartInstanceRequest', + 'StartWithEncryptionKeyInstanceRequest', + 'StatefulPolicy', + 'StatefulPolicyPreservedState', + 'StatefulPolicyPreservedStateDiskDevice', + 'StopAsyncReplicationDiskRequest', + 'StopAsyncReplicationRegionDiskRequest', + 'StopGroupAsyncReplicationDiskRequest', + 'StopGroupAsyncReplicationRegionDiskRequest', + 'StopInstanceRequest', + 'Subnetwork', + 'SubnetworkAggregatedList', + 'SubnetworkList', + 'SubnetworkLogConfig', + 
'SubnetworkSecondaryRange', + 'SubnetworksExpandIpCidrRangeRequest', + 'SubnetworksScopedList', + 'SubnetworksSetPrivateIpGoogleAccessRequest', + 'Subsetting', + 'SuspendInstanceRequest', + 'SwitchToCustomModeNetworkRequest', + 'TCPHealthCheck', + 'Tags', + 'TargetGrpcProxy', + 'TargetGrpcProxyList', + 'TargetHttpProxiesScopedList', + 'TargetHttpProxy', + 'TargetHttpProxyAggregatedList', + 'TargetHttpProxyList', + 'TargetHttpsProxiesScopedList', + 'TargetHttpsProxiesSetCertificateMapRequest', + 'TargetHttpsProxiesSetQuicOverrideRequest', + 'TargetHttpsProxiesSetSslCertificatesRequest', + 'TargetHttpsProxy', + 'TargetHttpsProxyAggregatedList', + 'TargetHttpsProxyList', + 'TargetInstance', + 'TargetInstanceAggregatedList', + 'TargetInstanceList', + 'TargetInstancesScopedList', + 'TargetPool', + 'TargetPoolAggregatedList', + 'TargetPoolInstanceHealth', + 'TargetPoolList', + 'TargetPoolsAddHealthCheckRequest', + 'TargetPoolsAddInstanceRequest', + 'TargetPoolsRemoveHealthCheckRequest', + 'TargetPoolsRemoveInstanceRequest', + 'TargetPoolsScopedList', + 'TargetReference', + 'TargetSslProxiesSetBackendServiceRequest', + 'TargetSslProxiesSetCertificateMapRequest', + 'TargetSslProxiesSetProxyHeaderRequest', + 'TargetSslProxiesSetSslCertificatesRequest', + 'TargetSslProxy', + 'TargetSslProxyList', + 'TargetTcpProxiesScopedList', + 'TargetTcpProxiesSetBackendServiceRequest', + 'TargetTcpProxiesSetProxyHeaderRequest', + 'TargetTcpProxy', + 'TargetTcpProxyAggregatedList', + 'TargetTcpProxyList', + 'TargetVpnGateway', + 'TargetVpnGatewayAggregatedList', + 'TargetVpnGatewayList', + 'TargetVpnGatewaysScopedList', + 'TestFailure', + 'TestIamPermissionsDiskRequest', + 'TestIamPermissionsExternalVpnGatewayRequest', + 'TestIamPermissionsFirewallPolicyRequest', + 'TestIamPermissionsImageRequest', + 'TestIamPermissionsInstanceRequest', + 'TestIamPermissionsInstanceTemplateRequest', + 'TestIamPermissionsLicenseCodeRequest', + 'TestIamPermissionsLicenseRequest', + 
'TestIamPermissionsMachineImageRequest', + 'TestIamPermissionsNetworkAttachmentRequest', + 'TestIamPermissionsNetworkEndpointGroupRequest', + 'TestIamPermissionsNetworkFirewallPolicyRequest', + 'TestIamPermissionsNodeGroupRequest', + 'TestIamPermissionsNodeTemplateRequest', + 'TestIamPermissionsPacketMirroringRequest', + 'TestIamPermissionsRegionDiskRequest', + 'TestIamPermissionsRegionNetworkFirewallPolicyRequest', + 'TestIamPermissionsReservationRequest', + 'TestIamPermissionsResourcePolicyRequest', + 'TestIamPermissionsServiceAttachmentRequest', + 'TestIamPermissionsSnapshotRequest', + 'TestIamPermissionsSubnetworkRequest', + 'TestIamPermissionsVpnGatewayRequest', + 'TestPermissionsRequest', + 'TestPermissionsResponse', + 'Uint128', + 'UpdateAccessConfigInstanceRequest', + 'UpdateAutoscalerRequest', + 'UpdateBackendBucketRequest', + 'UpdateBackendServiceRequest', + 'UpdateDiskRequest', + 'UpdateDisplayDeviceInstanceRequest', + 'UpdateFirewallRequest', + 'UpdateHealthCheckRequest', + 'UpdateInstanceRequest', + 'UpdateNetworkInterfaceInstanceRequest', + 'UpdatePeeringNetworkRequest', + 'UpdatePerInstanceConfigsInstanceGroupManagerRequest', + 'UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest', + 'UpdateRegionAutoscalerRequest', + 'UpdateRegionBackendServiceRequest', + 'UpdateRegionCommitmentRequest', + 'UpdateRegionDiskRequest', + 'UpdateRegionHealthCheckRequest', + 'UpdateRegionUrlMapRequest', + 'UpdateReservationRequest', + 'UpdateRouterRequest', + 'UpdateShieldedInstanceConfigInstanceRequest', + 'UpdateUrlMapRequest', + 'UrlMap', + 'UrlMapList', + 'UrlMapReference', + 'UrlMapTest', + 'UrlMapTestHeader', + 'UrlMapValidationResult', + 'UrlMapsAggregatedList', + 'UrlMapsScopedList', + 'UrlMapsValidateRequest', + 'UrlMapsValidateResponse', + 'UrlRewrite', + 'UsableSubnetwork', + 'UsableSubnetworkSecondaryRange', + 'UsableSubnetworksAggregatedList', + 'UsageExportLocation', + 'ValidateRegionUrlMapRequest', + 'ValidateUrlMapRequest', + 
'VmEndpointNatMappings', + 'VmEndpointNatMappingsInterfaceNatMappings', + 'VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings', + 'VmEndpointNatMappingsList', + 'VpnGateway', + 'VpnGatewayAggregatedList', + 'VpnGatewayList', + 'VpnGatewayStatus', + 'VpnGatewayStatusHighAvailabilityRequirementState', + 'VpnGatewayStatusTunnel', + 'VpnGatewayStatusVpnConnection', + 'VpnGatewayVpnGatewayInterface', + 'VpnGatewaysGetStatusResponse', + 'VpnGatewaysScopedList', + 'VpnTunnel', + 'VpnTunnelAggregatedList', + 'VpnTunnelList', + 'VpnTunnelsScopedList', + 'WafExpressionSet', + 'WafExpressionSetExpression', + 'WaitGlobalOperationRequest', + 'WaitRegionOperationRequest', + 'WaitZoneOperationRequest', + 'Warning', + 'Warnings', + 'WeightedBackendService', + 'XpnHostList', + 'XpnResourceId', + 'Zone', + 'ZoneList', + 'ZoneSetLabelsRequest', + 'ZoneSetPolicyRequest', + }, +) + + +class AbandonInstancesInstanceGroupManagerRequest(proto.Message): + r"""Messages + + A request message for InstanceGroupManagers.AbandonInstances. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + instance_group_managers_abandon_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersAbandonInstancesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_abandon_instances_request_resource: 'InstanceGroupManagersAbandonInstancesRequest' = proto.Field( + proto.MESSAGE, + number=320929016, + message='InstanceGroupManagersAbandonInstancesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class AbandonInstancesRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.AbandonInstances. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + Name of the managed instance group. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_group_managers_abandon_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersAbandonInstancesRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_managers_abandon_instances_request_resource: 'RegionInstanceGroupManagersAbandonInstancesRequest' = proto.Field( + proto.MESSAGE, + number=488499491, + message='RegionInstanceGroupManagersAbandonInstancesRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AcceleratorConfig(proto.Message): + r"""A specification of the type and number of accelerator cards + attached to the instance. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_count (int): + The number of the guest accelerator cards + exposed to this instance. + + This field is a member of `oneof`_ ``_accelerator_count``. + accelerator_type (str): + Full or partial URL of the accelerator type + resource to attach to this instance. For + example: + projects/my-project/zones/us-central1-c/acceleratorTypes/nvidia-tesla-p100 + If you are creating an instance template, + specify only the accelerator name. See GPUs on + Compute Engine for a full list of accelerator + types. + + This field is a member of `oneof`_ ``_accelerator_type``. 
+ """ + + accelerator_count: int = proto.Field( + proto.INT32, + number=504879675, + optional=True, + ) + accelerator_type: str = proto.Field( + proto.STRING, + number=138031246, + optional=True, + ) + + +class AcceleratorType(proto.Message): + r"""Represents an Accelerator Type resource. Google Cloud + Platform provides graphics processing units (accelerators) that + you can add to VM instances to improve or accelerate performance + when working with intensive workloads. For more information, + read GPUs on Compute Engine. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + deprecated (google.cloud.compute_v1.types.DeprecationStatus): + [Output Only] The deprecation status associated with this + accelerator type. + + This field is a member of `oneof`_ ``_deprecated``. + description (str): + [Output Only] An optional textual description of the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] The type of the resource. Always + compute#acceleratorType for accelerator types. + + This field is a member of `oneof`_ ``_kind``. + maximum_cards_per_instance (int): + [Output Only] Maximum number of accelerator cards allowed + per instance. + + This field is a member of `oneof`_ ``_maximum_cards_per_instance``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + self_link (str): + [Output Only] Server-defined, fully qualified URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ zone (str): + [Output Only] The name of the zone where the accelerator + type resides, such as us-central1-a. You must specify this + field as part of the HTTP request URL. It is not settable as + a field in the request body. + + This field is a member of `oneof`_ ``_zone``. + """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + deprecated: 'DeprecationStatus' = proto.Field( + proto.MESSAGE, + number=515138995, + optional=True, + message='DeprecationStatus', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + maximum_cards_per_instance: int = proto.Field( + proto.INT32, + number=263814482, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class AcceleratorTypeAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.AcceleratorTypesScopedList]): + A list of AcceleratorTypesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#acceleratorTypeAggregatedList for aggregated lists + of accelerator types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'AcceleratorTypesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='AcceleratorTypesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class AcceleratorTypeList(proto.Message): + r"""Contains a list of accelerator types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.AcceleratorType]): + A list of AcceleratorType resources. + kind (str): + [Output Only] Type of resource. 
Always + compute#acceleratorTypeList for lists of accelerator types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['AcceleratorType'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='AcceleratorType', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class AcceleratorTypesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_types (MutableSequence[google.cloud.compute_v1.types.AcceleratorType]): + [Output Only] A list of accelerator types contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] An informational warning that appears when the + accelerator types list is empty. 
+ + This field is a member of `oneof`_ ``_warning``. + """ + + accelerator_types: MutableSequence['AcceleratorType'] = proto.RepeatedField( + proto.MESSAGE, + number=520872357, + message='AcceleratorType', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class Accelerators(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + guest_accelerator_count (int): + Number of accelerator cards exposed to the + guest. + + This field is a member of `oneof`_ ``_guest_accelerator_count``. + guest_accelerator_type (str): + The accelerator type resource name, not a + full URL, e.g. nvidia-tesla-t4. + + This field is a member of `oneof`_ ``_guest_accelerator_type``. + """ + + guest_accelerator_count: int = proto.Field( + proto.INT32, + number=479079316, + optional=True, + ) + guest_accelerator_type: str = proto.Field( + proto.STRING, + number=293064725, + optional=True, + ) + + +class AccessConfig(proto.Message): + r"""An access configuration attached to an instance's network + interface. Only one access config per instance is supported. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + external_ipv6 (str): + Applies to ipv6AccessConfigs only. The first + IPv6 address of the external IPv6 range + associated with this instance, prefix length is + stored in externalIpv6PrefixLength in + ipv6AccessConfig. To use a static external IP + address, it must be unused and in the same + region as the instance's zone. If not specified, + Google Cloud will automatically assign an + external IPv6 address from the instance's + subnetwork. + + This field is a member of `oneof`_ ``_external_ipv6``. + external_ipv6_prefix_length (int): + Applies to ipv6AccessConfigs only. The prefix + length of the external IPv6 range. 
+ + This field is a member of `oneof`_ ``_external_ipv6_prefix_length``. + kind (str): + [Output Only] Type of the resource. Always + compute#accessConfig for access configs. + + This field is a member of `oneof`_ ``_kind``. + name (str): + The name of this access configuration. In + accessConfigs (IPv4), the default and + recommended name is External NAT, but you can + use any arbitrary string, such as My external IP + or Network Access. In ipv6AccessConfigs, the + recommended name is External IPv6. + + This field is a member of `oneof`_ ``_name``. + nat_i_p (str): + Applies to accessConfigs (IPv4) only. An + external IP address associated with this + instance. Specify an unused static external IP + address available to the project or leave this + field undefined to use an IP from a shared + ephemeral IP address pool. If you specify a + static external IP address, it must live in the + same region as the zone of the instance. + + This field is a member of `oneof`_ ``_nat_i_p``. + network_tier (str): + This signifies the networking tier used for + configuring this access configuration and can + only take the following values: PREMIUM, + STANDARD. If an AccessConfig is specified + without a valid external IP address, an + ephemeral IP will be created with this + networkTier. If an AccessConfig with a valid + external IP address is specified, it must match + that of the networkTier associated with the + Address resource owning that IP. Check the + NetworkTier enum for the list of possible + values. + + This field is a member of `oneof`_ ``_network_tier``. + public_ptr_domain_name (str): + The DNS domain name for the public PTR record. You can set + this field only if the ``setPublicPtr`` field is enabled in + accessConfig. If this field is unspecified in + ipv6AccessConfig, a default PTR record will be created for + first IP in associated external IPv6 range. + + This field is a member of `oneof`_ ``_public_ptr_domain_name``. 
+ set_public_ptr (bool): + Specifies whether a public DNS 'PTR' record + should be created to map the external IP address + of the instance to a DNS domain name. This field + is not used in ipv6AccessConfig. A default PTR + record will be created if the VM has external + IPv6 range associated. + + This field is a member of `oneof`_ ``_set_public_ptr``. + type_ (str): + The type of configuration. In accessConfigs (IPv4), the + default and only option is ONE_TO_ONE_NAT. In + ipv6AccessConfigs, the default and only option is + DIRECT_IPV6. Check the Type enum for the list of possible + values. + + This field is a member of `oneof`_ ``_type``. + """ + class NetworkTier(proto.Enum): + r"""This signifies the networking tier used for configuring this + access configuration and can only take the following values: + PREMIUM, STANDARD. If an AccessConfig is specified without a + valid external IP address, an ephemeral IP will be created with + this networkTier. If an AccessConfig with a valid external IP + address is specified, it must match that of the networkTier + associated with the Address resource owning that IP. + + Values: + UNDEFINED_NETWORK_TIER (0): + A value indicating that the enum field is not + set. + FIXED_STANDARD (310464328): + Public internet quality with fixed bandwidth. + PREMIUM (399530551): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (484642493): + Public internet quality, only limited support + for other networking products. + STANDARD_OVERRIDES_FIXED_STANDARD (465847234): + (Output only) Temporary tier for FIXED_STANDARD when fixed + standard tier is expired or not configured. + """ + UNDEFINED_NETWORK_TIER = 0 + FIXED_STANDARD = 310464328 + PREMIUM = 399530551 + STANDARD = 484642493 + STANDARD_OVERRIDES_FIXED_STANDARD = 465847234 + + class Type(proto.Enum): + r"""The type of configuration. In accessConfigs (IPv4), the default and + only option is ONE_TO_ONE_NAT. 
In ipv6AccessConfigs, the default and + only option is DIRECT_IPV6. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + DIRECT_IPV6 (4397213): + No description available. + ONE_TO_ONE_NAT (84090205): + No description available. + """ + UNDEFINED_TYPE = 0 + DIRECT_IPV6 = 4397213 + ONE_TO_ONE_NAT = 84090205 + + external_ipv6: str = proto.Field( + proto.STRING, + number=532703707, + optional=True, + ) + external_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=425672143, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + nat_i_p: str = proto.Field( + proto.STRING, + number=117634556, + optional=True, + ) + network_tier: str = proto.Field( + proto.STRING, + number=517397843, + optional=True, + ) + public_ptr_domain_name: str = proto.Field( + proto.STRING, + number=316599167, + optional=True, + ) + set_public_ptr: bool = proto.Field( + proto.BOOL, + number=523870229, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class AddAccessConfigInstanceRequest(proto.Message): + r"""A request message for Instances.AddAccessConfig. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + access_config_resource (google.cloud.compute_v1.types.AccessConfig): + The body resource for this request + instance (str): + The instance name for this request. + network_interface (str): + The name of the network interface to add to + this instance. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + access_config_resource: 'AccessConfig' = proto.Field( + proto.MESSAGE, + number=387825552, + message='AccessConfig', + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + network_interface: str = proto.Field( + proto.STRING, + number=365387880, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class AddAssociationFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.AddAssociation. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation): + The body resource for this request + replace_existing_association (bool): + Indicates whether or not to replace it if an + association of the attachment already exists. + This is false by default, in which case an error + will be returned if an association already + exists. + + This field is a member of `oneof`_ ``_replace_existing_association``. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_association_resource: 'FirewallPolicyAssociation' = proto.Field( + proto.MESSAGE, + number=259546170, + message='FirewallPolicyAssociation', + ) + replace_existing_association: bool = proto.Field( + proto.BOOL, + number=209541240, + optional=True, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AddAssociationNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.AddAssociation. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation): + The body resource for this request + project (str): + Project ID for this request. + replace_existing_association (bool): + Indicates whether or not to replace it if an + association of the attachment already exists. + This is false by default, in which case an error + will be returned if an association already + exists. 
+ + This field is a member of `oneof`_ ``_replace_existing_association``. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_association_resource: 'FirewallPolicyAssociation' = proto.Field( + proto.MESSAGE, + number=259546170, + message='FirewallPolicyAssociation', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + replace_existing_association: bool = proto.Field( + proto.BOOL, + number=209541240, + optional=True, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AddAssociationRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.AddAssociation. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_association_resource (google.cloud.compute_v1.types.FirewallPolicyAssociation): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ replace_existing_association (bool): + Indicates whether or not to replace it if an + association already exists. This is false by + default, in which case an error will be returned + if an association already exists. + + This field is a member of `oneof`_ ``_replace_existing_association``. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_association_resource: 'FirewallPolicyAssociation' = proto.Field( + proto.MESSAGE, + number=259546170, + message='FirewallPolicyAssociation', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + replace_existing_association: bool = proto.Field( + proto.BOOL, + number=209541240, + optional=True, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AddHealthCheckTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.AddHealthCheck. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. 
+ region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_pool (str): + Name of the target pool to add a health check + to. + target_pools_add_health_check_request_resource (google.cloud.compute_v1.types.TargetPoolsAddHealthCheckRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + target_pools_add_health_check_request_resource: 'TargetPoolsAddHealthCheckRequest' = proto.Field( + proto.MESSAGE, + number=269573412, + message='TargetPoolsAddHealthCheckRequest', + ) + + +class AddInstanceTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.AddInstance. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_pool (str): + Name of the TargetPool resource to add + instances to. + target_pools_add_instance_request_resource (google.cloud.compute_v1.types.TargetPoolsAddInstanceRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + target_pools_add_instance_request_resource: 'TargetPoolsAddInstanceRequest' = proto.Field( + proto.MESSAGE, + number=428796404, + message='TargetPoolsAddInstanceRequest', + ) + + +class AddInstancesInstanceGroupRequest(proto.Message): + r"""A request message for InstanceGroups.AddInstances. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group (str): + The name of the instance group where you are + adding instances. 
+ instance_groups_add_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsAddInstancesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the instance group + is located. + """ + + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + instance_groups_add_instances_request_resource: 'InstanceGroupsAddInstancesRequest' = proto.Field( + proto.MESSAGE, + number=453713246, + message='InstanceGroupsAddInstancesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class AddNodesNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.AddNodes. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_group (str): + Name of the NodeGroup resource. 
+ node_groups_add_nodes_request_resource (google.cloud.compute_v1.types.NodeGroupsAddNodesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + node_groups_add_nodes_request_resource: 'NodeGroupsAddNodesRequest' = proto.Field( + proto.MESSAGE, + number=131263288, + message='NodeGroupsAddNodesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class AddPeeringNetworkRequest(proto.Message): + r"""A request message for Networks.AddPeering. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Name of the network resource to add peering + to. 
+ networks_add_peering_request_resource (google.cloud.compute_v1.types.NetworksAddPeeringRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + networks_add_peering_request_resource: 'NetworksAddPeeringRequest' = proto.Field( + proto.MESSAGE, + number=388810421, + message='NetworksAddPeeringRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AddResourcePoliciesDiskRequest(proto.Message): + r"""A request message for Disks.AddResourcePolicies. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The disk name for this request. + disks_add_resource_policies_request_resource (google.cloud.compute_v1.types.DisksAddResourcePoliciesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + disks_add_resource_policies_request_resource: 'DisksAddResourcePoliciesRequest' = proto.Field( + proto.MESSAGE, + number=496483363, + message='DisksAddResourcePoliciesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class AddResourcePoliciesInstanceRequest(proto.Message): + r"""A request message for Instances.AddResourcePolicies. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + The instance name for this request. + instances_add_resource_policies_request_resource (google.cloud.compute_v1.types.InstancesAddResourcePoliciesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_add_resource_policies_request_resource: 'InstancesAddResourcePoliciesRequest' = proto.Field( + proto.MESSAGE, + number=489351963, + message='InstancesAddResourcePoliciesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class AddResourcePoliciesRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.AddResourcePolicies. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The disk name for this request. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_disks_add_resource_policies_request_resource (google.cloud.compute_v1.types.RegionDisksAddResourcePoliciesRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_disks_add_resource_policies_request_resource: 'RegionDisksAddResourcePoliciesRequest' = proto.Field( + proto.MESSAGE, + number=284196750, + message='RegionDisksAddResourcePoliciesRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AddRuleFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.AddRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_rule_resource: 'FirewallPolicyRule' = proto.Field( + proto.MESSAGE, + number=250523523, + message='FirewallPolicyRule', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AddRuleNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.AddRule. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + max_priority (int): + When rule.priority is not specified, auto + choose a unused priority between minPriority and + maxPriority>. This field is exclusive with + rule.priority. + + This field is a member of `oneof`_ ``_max_priority``. + min_priority (int): + When rule.priority is not specified, auto + choose a unused priority between minPriority and + maxPriority>. This field is exclusive with + rule.priority. + + This field is a member of `oneof`_ ``_min_priority``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_rule_resource: 'FirewallPolicyRule' = proto.Field( + proto.MESSAGE, + number=250523523, + message='FirewallPolicyRule', + ) + max_priority: int = proto.Field( + proto.INT32, + number=329635359, + optional=True, + ) + min_priority: int = proto.Field( + proto.INT32, + number=267190513, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AddRuleRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.AddRule. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + max_priority (int): + When rule.priority is not specified, auto + choose a unused priority between minPriority and + maxPriority>. This field is exclusive with + rule.priority. + + This field is a member of `oneof`_ ``_max_priority``. + min_priority (int): + When rule.priority is not specified, auto + choose a unused priority between minPriority and + maxPriority>. 
This field is exclusive with + rule.priority. + + This field is a member of `oneof`_ ``_min_priority``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_rule_resource: 'FirewallPolicyRule' = proto.Field( + proto.MESSAGE, + number=250523523, + message='FirewallPolicyRule', + ) + max_priority: int = proto.Field( + proto.INT32, + number=329635359, + optional=True, + ) + min_priority: int = proto.Field( + proto.INT32, + number=267190513, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AddRuleSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.AddRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. 
+ security_policy (str): + Name of the security policy to update. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + validate_only (bool): + If true, the request will not be committed. + + This field is a member of `oneof`_ ``_validate_only``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + security_policy_rule_resource: 'SecurityPolicyRule' = proto.Field( + proto.MESSAGE, + number=402693443, + message='SecurityPolicyRule', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=242744629, + optional=True, + ) + + +class AddSignedUrlKeyBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.AddSignedUrlKey. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket (str): + Name of the BackendBucket resource to which + the Signed URL Key should be added. The name + should conform to RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ signed_url_key_resource (google.cloud.compute_v1.types.SignedUrlKey): + The body resource for this request + """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + signed_url_key_resource: 'SignedUrlKey' = proto.Field( + proto.MESSAGE, + number=457625985, + message='SignedUrlKey', + ) + + +class AddSignedUrlKeyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.AddSignedUrlKey. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to which + the Signed URL Key should be added. The name + should conform to RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ signed_url_key_resource (google.cloud.compute_v1.types.SignedUrlKey): + The body resource for this request + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + signed_url_key_resource: 'SignedUrlKey' = proto.Field( + proto.MESSAGE, + number=457625985, + message='SignedUrlKey', + ) + + +class Address(proto.Message): + r"""Represents an IP Address resource. Google Compute Engine has two IP + Address resources: \* `Global (external and + internal) `__ + \* `Regional (external and + internal) `__ + For more information, see Reserving a static external IP address. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address (str): + The static IP address represented by this + resource. + + This field is a member of `oneof`_ ``_address``. + address_type (str): + The type of address to reserve, either + INTERNAL or EXTERNAL. If unspecified, defaults + to EXTERNAL. Check the AddressType enum for the + list of possible values. + + This field is a member of `oneof`_ ``_address_type``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this field when you create the resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + ip_version (str): + The IP version that will be used by this + address. Valid options are IPV4 or IPV6. Check + the IpVersion enum for the list of possible + values. + + This field is a member of `oneof`_ ``_ip_version``. 
+ ipv6_endpoint_type (str): + The endpoint type of this address, which + should be VM or NETLB. This is used for deciding + which type of endpoint this address can be used + after the external IPv6 address reservation. + Check the Ipv6EndpointType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_ipv6_endpoint_type``. + kind (str): + [Output Only] Type of the resource. Always compute#address + for addresses. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this Address, which is essentially a hash of the + labels set used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash in order to update + or change labels, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make a get() request to + retrieve an Address. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?``. The first character must be + a lowercase letter, and all following characters (except for + the last character) must be a dash, lowercase letter, or + digit. The last character must be a lowercase letter or + digit. + + This field is a member of `oneof`_ ``_name``. + network (str): + The URL of the network in which to reserve the address. 
This + field can only be used with INTERNAL type with the + VPC_PEERING purpose. + + This field is a member of `oneof`_ ``_network``. + network_tier (str): + This signifies the networking tier used for + configuring this address and can only take the + following values: PREMIUM or STANDARD. Internal + IP addresses are always Premium Tier; global + external IP addresses are always Premium Tier; + regional external IP addresses can be either + Standard or Premium Tier. If this field is not + specified, it is assumed to be PREMIUM. Check + the NetworkTier enum for the list of possible + values. + + This field is a member of `oneof`_ ``_network_tier``. + prefix_length (int): + The prefix length if the resource represents + an IP range. + + This field is a member of `oneof`_ ``_prefix_length``. + purpose (str): + The purpose of this resource, which can be one of the + following values: - GCE_ENDPOINT for addresses that are used + by VM instances, alias IP ranges, load balancers, and + similar resources. - DNS_RESOLVER for a DNS resolver address + in a subnetwork for a Cloud DNS inbound forwarder IP + addresses (regional internal IP address in a subnet of a VPC + network) - VPC_PEERING for global internal IP addresses used + for private services access allocated ranges. - NAT_AUTO for + the regional external IP addresses used by Cloud NAT when + allocating addresses using automatic NAT IP address + allocation. - IPSEC_INTERCONNECT for addresses created from + a private IP range that are reserved for a VLAN attachment + in an *HA VPN over Cloud Interconnect* configuration. These + addresses are regional resources. - + ``SHARED_LOADBALANCER_VIP`` for an internal IP address that + is assigned to multiple internal forwarding rules. - + ``PRIVATE_SERVICE_CONNECT`` for a private network address + that is used to configure Private Service Connect. Only + global internal addresses can use this purpose. Check the + Purpose enum for the list of possible values. 
+ + This field is a member of `oneof`_ ``_purpose``. + region (str): + [Output Only] The URL of the region where a regional address + resides. For regional addresses, you must specify the region + as a path parameter in the HTTP request URL. *This field is + not applicable to global addresses.* + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + status (str): + [Output Only] The status of the address, which can be one of + RESERVING, RESERVED, or IN_USE. An address that is RESERVING + is currently in the process of being reserved. A RESERVED + address is currently reserved and available to use. An + IN_USE address is currently being used by another resource + and is not available. Check the Status enum for the list of + possible values. + + This field is a member of `oneof`_ ``_status``. + subnetwork (str): + The URL of the subnetwork in which to reserve the address. + If an IP address is specified, it must be within the + subnetwork's IP range. This field can only be used with + INTERNAL type with a GCE_ENDPOINT or DNS_RESOLVER purpose. + + This field is a member of `oneof`_ ``_subnetwork``. + users (MutableSequence[str]): + [Output Only] The URLs of the resources that are using this + address. + """ + class AddressType(proto.Enum): + r"""The type of address to reserve, either INTERNAL or EXTERNAL. + If unspecified, defaults to EXTERNAL. + + Values: + UNDEFINED_ADDRESS_TYPE (0): + A value indicating that the enum field is not + set. + EXTERNAL (35607499): + A publicly visible external IP address. + INTERNAL (279295677): + A private network IP address, for use with an + Instance or Internal Load Balancer forwarding + rule. + UNSPECIFIED_TYPE (53933922): + No description available. 
+ """ + UNDEFINED_ADDRESS_TYPE = 0 + EXTERNAL = 35607499 + INTERNAL = 279295677 + UNSPECIFIED_TYPE = 53933922 + + class IpVersion(proto.Enum): + r"""The IP version that will be used by this address. Valid + options are IPV4 or IPV6. + + Values: + UNDEFINED_IP_VERSION (0): + A value indicating that the enum field is not + set. + IPV4 (2254341): + No description available. + IPV6 (2254343): + No description available. + UNSPECIFIED_VERSION (21850000): + No description available. + """ + UNDEFINED_IP_VERSION = 0 + IPV4 = 2254341 + IPV6 = 2254343 + UNSPECIFIED_VERSION = 21850000 + + class Ipv6EndpointType(proto.Enum): + r"""The endpoint type of this address, which should be VM or + NETLB. This is used for deciding which type of endpoint this + address can be used after the external IPv6 address reservation. + + Values: + UNDEFINED_IPV6_ENDPOINT_TYPE (0): + A value indicating that the enum field is not + set. + NETLB (74173363): + Reserved IPv6 address can be used on network + load balancer. + VM (2743): + Reserved IPv6 address can be used on VM. + """ + UNDEFINED_IPV6_ENDPOINT_TYPE = 0 + NETLB = 74173363 + VM = 2743 + + class NetworkTier(proto.Enum): + r"""This signifies the networking tier used for configuring this + address and can only take the following values: PREMIUM or + STANDARD. Internal IP addresses are always Premium Tier; global + external IP addresses are always Premium Tier; regional external + IP addresses can be either Standard or Premium Tier. If this + field is not specified, it is assumed to be PREMIUM. + + Values: + UNDEFINED_NETWORK_TIER (0): + A value indicating that the enum field is not + set. + FIXED_STANDARD (310464328): + Public internet quality with fixed bandwidth. + PREMIUM (399530551): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (484642493): + Public internet quality, only limited support + for other networking products. 
+ STANDARD_OVERRIDES_FIXED_STANDARD (465847234): + (Output only) Temporary tier for FIXED_STANDARD when fixed + standard tier is expired or not configured. + """ + UNDEFINED_NETWORK_TIER = 0 + FIXED_STANDARD = 310464328 + PREMIUM = 399530551 + STANDARD = 484642493 + STANDARD_OVERRIDES_FIXED_STANDARD = 465847234 + + class Purpose(proto.Enum): + r"""The purpose of this resource, which can be one of the following + values: - GCE_ENDPOINT for addresses that are used by VM instances, + alias IP ranges, load balancers, and similar resources. - + DNS_RESOLVER for a DNS resolver address in a subnetwork for a Cloud + DNS inbound forwarder IP addresses (regional internal IP address in + a subnet of a VPC network) - VPC_PEERING for global internal IP + addresses used for private services access allocated ranges. - + NAT_AUTO for the regional external IP addresses used by Cloud NAT + when allocating addresses using automatic NAT IP address allocation. + - IPSEC_INTERCONNECT for addresses created from a private IP range + that are reserved for a VLAN attachment in an *HA VPN over Cloud + Interconnect* configuration. These addresses are regional resources. + - ``SHARED_LOADBALANCER_VIP`` for an internal IP address that is + assigned to multiple internal forwarding rules. - + ``PRIVATE_SERVICE_CONNECT`` for a private network address that is + used to configure Private Service Connect. Only global internal + addresses can use this purpose. + + Values: + UNDEFINED_PURPOSE (0): + A value indicating that the enum field is not + set. + DNS_RESOLVER (476114556): + DNS resolver address in the subnetwork. + GCE_ENDPOINT (230515243): + VM internal/alias IP, Internal LB service IP, + etc. + IPSEC_INTERCONNECT (340437251): + A regional internal IP address range reserved + for the VLAN attachment that is used in HA VPN + over Cloud Interconnect. 
its interface IP address is allocated from the + associated VLAN attachment's IP address range.
+ """ + UNDEFINED_STATUS = 0 + IN_USE = 17393485 + RESERVED = 432241448 + RESERVING = 514587225 + + address: str = proto.Field( + proto.STRING, + number=462920692, + optional=True, + ) + address_type: str = proto.Field( + proto.STRING, + number=264307877, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + ip_version: str = proto.Field( + proto.STRING, + number=294959552, + optional=True, + ) + ipv6_endpoint_type: str = proto.Field( + proto.STRING, + number=97501004, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + network_tier: str = proto.Field( + proto.STRING, + number=517397843, + optional=True, + ) + prefix_length: int = proto.Field( + proto.INT32, + number=453565747, + optional=True, + ) + purpose: str = proto.Field( + proto.STRING, + number=316407070, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + optional=True, + ) + users: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=111578632, + ) + + +class AddressAggregatedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.AddressesScopedList]): + A list of AddressesScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#addressAggregatedList for aggregated lists of + addresses. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'AddressesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='AddressesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class AddressList(proto.Message): + r"""Contains a list of addresses. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Address]): + A list of Address resources. + kind (str): + [Output Only] Type of resource. Always compute#addressList + for lists of addresses. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Address'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Address', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class AddressesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + addresses (MutableSequence[google.cloud.compute_v1.types.Address]): + [Output Only] A list of addresses contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + addresses: MutableSequence['Address'] = proto.RepeatedField( + proto.MESSAGE, + number=337673122, + message='Address', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class AdvancedMachineFeatures(proto.Message): + r"""Specifies options for controlling advanced machine features. + Options that would traditionally be configured in a BIOS belong + here. Features that require operating system support may have + corresponding entries in the GuestOsFeatures of an Image (e.g., + whether or not the OS in the Image supports nested + virtualization being enabled or disabled). + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_nested_virtualization (bool): + Whether to enable nested virtualization or + not (default is false). + + This field is a member of `oneof`_ ``_enable_nested_virtualization``. + enable_uefi_networking (bool): + Whether to enable UEFI networking for + instance creation. + + This field is a member of `oneof`_ ``_enable_uefi_networking``. + threads_per_core (int): + The number of threads per physical core. To + disable simultaneous multithreading (SMT) set + this to 1. If unset, the maximum number of + threads supported per core by the underlying + processor is assumed. + + This field is a member of `oneof`_ ``_threads_per_core``. + visible_core_count (int): + The number of physical cores to expose to an + instance. Multiply by the number of threads per + core to compute the total number of virtual CPUs + to expose to the instance. If unset, the number + of cores is inferred from the instance's nominal + CPU count and the underlying platform's SMT + width. + + This field is a member of `oneof`_ ``_visible_core_count``. + """ + + enable_nested_virtualization: bool = proto.Field( + proto.BOOL, + number=16639365, + optional=True, + ) + enable_uefi_networking: bool = proto.Field( + proto.BOOL, + number=334485668, + optional=True, + ) + threads_per_core: int = proto.Field( + proto.INT32, + number=352611671, + optional=True, + ) + visible_core_count: int = proto.Field( + proto.INT32, + number=193198684, + optional=True, + ) + + +class AggregatedListAcceleratorTypesRequest(proto.Message): + r"""A request message for AcceleratorTypes.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. 
Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListAddressesRequest(proto.Message): + r"""A request message for Addresses.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. 
The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. 
+ + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. 
+ return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListAutoscalersRequest(proto.Message): + r"""A request message for Autoscalers.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. 
The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. 
+ For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListBackendServicesRequest(proto.Message): + r"""A request message for BackendServices.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListDiskTypesRequest(proto.Message): + r"""A request message for DiskTypes.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListDisksRequest(proto.Message): + r"""A request message for Disks.AggregatedList. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListForwardingRulesRequest(proto.Message): + r"""A request message for ForwardingRules.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListGlobalOperationsRequest(proto.Message): + r"""A request message for GlobalOperations.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListHealthChecksRequest(proto.Message): + r"""A request message for HealthChecks.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListInstanceGroupManagersRequest(proto.Message): + r"""A request message for InstanceGroupManagers.AggregatedList. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListInstanceGroupsRequest(proto.Message): + r"""A request message for InstanceGroups.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListInstanceTemplatesRequest(proto.Message): + r"""A request message for InstanceTemplates.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListInstancesRequest(proto.Message): + r"""A request message for Instances.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListInterconnectAttachmentsRequest(proto.Message): + r"""A request message for InterconnectAttachments.AggregatedList. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListMachineTypesRequest(proto.Message): + r"""A request message for MachineTypes.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListNetworkAttachmentsRequest(proto.Message): + r"""A request message for NetworkAttachments.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): + r"""A request message for + NetworkEdgeSecurityServices.AggregatedList. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for NetworkEndpointGroups.AggregatedList. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListNodeTemplatesRequest(proto.Message): + r"""A request message for NodeTemplates.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListNodeTypesRequest(proto.Message): + r"""A request message for NodeTypes.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListPacketMirroringsRequest(proto.Message): + r"""A request message for PacketMirrorings.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.AggregatedList. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListRegionCommitmentsRequest(proto.Message): + r"""A request message for RegionCommitments.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListReservationsRequest(proto.Message): + r"""A request message for Reservations.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListResourcePoliciesRequest(proto.Message): + r"""A request message for ResourcePolicies.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListRoutersRequest(proto.Message): + r"""A request message for Routers.AggregatedList. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListSecurityPoliciesRequest(proto.Message): + r"""A request message for SecurityPolicies.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListServiceAttachmentsRequest(proto.Message): + r"""A request message for ServiceAttachments.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListSslCertificatesRequest(proto.Message): + r"""A request message for SslCertificates.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListSslPoliciesRequest(proto.Message): + r"""A request message for SslPolicies.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListTargetHttpProxiesRequest(proto.Message): + r"""A request message for TargetHttpProxies.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListTargetHttpsProxiesRequest(proto.Message): + r"""A request message for TargetHttpsProxies.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListTargetInstancesRequest(proto.Message): + r"""A request message for TargetInstances.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListTargetPoolsRequest(proto.Message): + r"""A request message for TargetPools.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListTargetTcpProxiesRequest(proto.Message): + r"""A request message for TargetTcpProxies.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListTargetVpnGatewaysRequest(proto.Message): + r"""A request message for TargetVpnGateways.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListUrlMapsRequest(proto.Message): + r"""A request message for UrlMaps.AggregatedList. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListVpnGatewaysRequest(proto.Message): + r"""A request message for VpnGateways.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AggregatedListVpnTunnelsRequest(proto.Message): + r"""A request message for VpnTunnels.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class AliasIpRange(proto.Message): + r"""An alias IP range attached to an instance's network + interface. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_cidr_range (str): + The IP alias ranges to allocate for this + interface. This IP CIDR range must belong to the + specified subnetwork and cannot contain IP + addresses reserved by system or used by other + network interfaces. This range may be a single + IP address (such as 10.2.3.4), a netmask (such + as /24) or a CIDR-formatted string (such as + 10.1.2.0/24). + + This field is a member of `oneof`_ ``_ip_cidr_range``. + subnetwork_range_name (str): + The name of a subnetwork secondary IP range + from which to allocate an IP alias range. If not + specified, the primary range of the subnetwork + is used. + + This field is a member of `oneof`_ ``_subnetwork_range_name``. + """ + + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + subnetwork_range_name: str = proto.Field( + proto.STRING, + number=387995966, + optional=True, + ) + + +class AllocationResourceStatus(proto.Message): + r"""[Output Only] Contains output only fields. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + specific_sku_allocation (google.cloud.compute_v1.types.AllocationResourceStatusSpecificSKUAllocation): + Allocation Properties of this reservation. + + This field is a member of `oneof`_ ``_specific_sku_allocation``. + """ + + specific_sku_allocation: 'AllocationResourceStatusSpecificSKUAllocation' = proto.Field( + proto.MESSAGE, + number=196231151, + optional=True, + message='AllocationResourceStatusSpecificSKUAllocation', + ) + + +class AllocationResourceStatusSpecificSKUAllocation(proto.Message): + r"""Contains Properties set for the reservation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_instance_template_id (str): + ID of the instance template used to populate + reservation properties. + + This field is a member of `oneof`_ ``_source_instance_template_id``. + """ + + source_instance_template_id: str = proto.Field( + proto.STRING, + number=111196154, + optional=True, + ) + + +class AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_size_gb (int): + Specifies the size of the disk in base-2 GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. + interface (str): + Specifies the disk interface to use for + attaching this disk, which is either SCSI or + NVME. The default is SCSI. For performance + characteristics of SCSI over NVMe, see Local SSD + performance. Check the Interface enum for the + list of possible values. + + This field is a member of `oneof`_ ``_interface``. + """ + class Interface(proto.Enum): + r"""Specifies the disk interface to use for attaching this disk, + which is either SCSI or NVME. The default is SCSI. 
For + performance characteristics of SCSI over NVMe, see Local SSD + performance. + + Values: + UNDEFINED_INTERFACE (0): + A value indicating that the enum field is not + set. + NVME (2408800): + No description available. + SCSI (2539686): + No description available. + """ + UNDEFINED_INTERFACE = 0 + NVME = 2408800 + SCSI = 2539686 + + disk_size_gb: int = proto.Field( + proto.INT64, + number=316263735, + optional=True, + ) + interface: str = proto.Field( + proto.STRING, + number=502623545, + optional=True, + ) + + +class AllocationSpecificSKUAllocationReservedInstanceProperties(proto.Message): + r"""Properties of the SKU instances being reserved. Next ID: 9 + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + guest_accelerators (MutableSequence[google.cloud.compute_v1.types.AcceleratorConfig]): + Specifies accelerator type and count. + local_ssds (MutableSequence[google.cloud.compute_v1.types.AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk]): + Specifies amount of local ssd to reserve with + each instance. The type of disk is local-ssd. + location_hint (str): + An opaque location hint used to place the + allocation close to other resources. This field + is for use by internal tools that use the public + API. + + This field is a member of `oneof`_ ``_location_hint``. + machine_type (str): + Specifies type of machine (name only) which has fixed number + of vCPUs and fixed amount of memory. This also includes + specifying custom machine type following + custom-NUMBER_OF_CPUS-AMOUNT_OF_MEMORY pattern. + + This field is a member of `oneof`_ ``_machine_type``. + min_cpu_platform (str): + Minimum cpu platform the reservation. + + This field is a member of `oneof`_ ``_min_cpu_platform``. 
+ """ + + guest_accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=463595119, + message='AcceleratorConfig', + ) + local_ssds: MutableSequence['AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk'] = proto.RepeatedField( + proto.MESSAGE, + number=229951299, + message='AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk', + ) + location_hint: str = proto.Field( + proto.STRING, + number=350519505, + optional=True, + ) + machine_type: str = proto.Field( + proto.STRING, + number=227711026, + optional=True, + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=242912759, + optional=True, + ) + + +class AllocationSpecificSKUReservation(proto.Message): + r"""This reservation type allows to pre allocate specific + instance configuration. Next ID: 6 + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + assured_count (int): + [Output Only] Indicates how many instances are actually + usable currently. + + This field is a member of `oneof`_ ``_assured_count``. + count (int): + Specifies the number of resources that are + allocated. + + This field is a member of `oneof`_ ``_count``. + in_use_count (int): + [Output Only] Indicates how many instances are in use. + + This field is a member of `oneof`_ ``_in_use_count``. + instance_properties (google.cloud.compute_v1.types.AllocationSpecificSKUAllocationReservedInstanceProperties): + The instance properties for the reservation. + + This field is a member of `oneof`_ ``_instance_properties``. + source_instance_template (str): + Specifies the instance template to create the + reservation. If you use this field, you must + exclude the instanceProperties field. This field + is optional, and it can be a full or partial + URL. 
For example, the following are all valid + URLs to an instance template: - + https://www.googleapis.com/compute/v1/projects/project + /global/instanceTemplates/instanceTemplate - + projects/project/global/instanceTemplates/instanceTemplate + - global/instanceTemplates/instanceTemplate + + This field is a member of `oneof`_ ``_source_instance_template``. + """ + + assured_count: int = proto.Field( + proto.INT64, + number=281197645, + optional=True, + ) + count: int = proto.Field( + proto.INT64, + number=94851343, + optional=True, + ) + in_use_count: int = proto.Field( + proto.INT64, + number=493458877, + optional=True, + ) + instance_properties: 'AllocationSpecificSKUAllocationReservedInstanceProperties' = proto.Field( + proto.MESSAGE, + number=215355165, + optional=True, + message='AllocationSpecificSKUAllocationReservedInstanceProperties', + ) + source_instance_template: str = proto.Field( + proto.STRING, + number=332423616, + optional=True, + ) + + +class Allowed(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + I_p_protocol (str): + The IP protocol to which this rule applies. + The protocol type is required when creating a + firewall rule. This value can either be one of + the following well known protocol strings (tcp, + udp, icmp, esp, ah, ipip, sctp) or the IP + protocol number. + + This field is a member of `oneof`_ ``_I_p_protocol``. + ports (MutableSequence[str]): + An optional list of ports to which this rule applies. This + field is only applicable for the UDP or TCP protocol. Each + entry must be either an integer or a range. If not + specified, this rule applies to connections through any + port. Example inputs include: ["22"], ["80","443"], and + ["12345-12349"]. 
+ """ + + I_p_protocol: str = proto.Field( + proto.STRING, + number=488094525, + optional=True, + ) + ports: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=106854418, + ) + + +class ApplyUpdatesToInstancesInstanceGroupManagerRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.ApplyUpdatesToInstances. See the method + description for details. + + Attributes: + instance_group_manager (str): + The name of the managed instance group, + should conform to RFC1035. + instance_group_managers_apply_updates_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersApplyUpdatesRequest): + The body resource for this request + project (str): + Project ID for this request. + zone (str): + The name of the zone where the managed + instance group is located. Should conform to + RFC1035. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_apply_updates_request_resource: 'InstanceGroupManagersApplyUpdatesRequest' = proto.Field( + proto.MESSAGE, + number=259242835, + message='InstanceGroupManagersApplyUpdatesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.ApplyUpdatesToInstances. See the + method description for details. + + Attributes: + instance_group_manager (str): + The name of the managed instance group, + should conform to RFC1035. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request, + should conform to RFC1035. 
+ region_instance_group_managers_apply_updates_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersApplyUpdatesRequest): + The body resource for this request + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_managers_apply_updates_request_resource: 'RegionInstanceGroupManagersApplyUpdatesRequest' = proto.Field( + proto.MESSAGE, + number=76248318, + message='RegionInstanceGroupManagersApplyUpdatesRequest', + ) + + +class AttachDiskInstanceRequest(proto.Message): + r"""A request message for Instances.AttachDisk. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + attached_disk_resource (google.cloud.compute_v1.types.AttachedDisk): + The body resource for this request + force_attach (bool): + Whether to force attach the regional disk + even if it's currently attached to another + instance. If you try to force attach a zonal + disk to an instance, you will receive an error. + + This field is a member of `oneof`_ ``_force_attach``. + instance (str): + The instance name for this request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + attached_disk_resource: 'AttachedDisk' = proto.Field( + proto.MESSAGE, + number=90605845, + message='AttachedDisk', + ) + force_attach: bool = proto.Field( + proto.BOOL, + number=142758425, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest(proto.Message): + r"""A request message for + GlobalNetworkEndpointGroups.AttachNetworkEndpoints. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + global_network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.GlobalNetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + network_endpoint_group (str): + The name of the network endpoint group where + you are attaching network endpoints to. It + should comply with RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + global_network_endpoint_groups_attach_endpoints_request_resource: 'GlobalNetworkEndpointGroupsAttachEndpointsRequest' = proto.Field( + proto.MESSAGE, + number=30691563, + message='GlobalNetworkEndpointGroupsAttachEndpointsRequest', + ) + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class AttachNetworkEndpointsNetworkEndpointGroupRequest(proto.Message): + r"""A request message for + NetworkEndpointGroups.AttachNetworkEndpoints. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group where + you are attaching network endpoints to. It + should comply with RFC1035. + network_endpoint_groups_attach_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsAttachEndpointsRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the network + endpoint group is located. It should comply with + RFC1035. + """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + network_endpoint_groups_attach_endpoints_request_resource: 'NetworkEndpointGroupsAttachEndpointsRequest' = proto.Field( + proto.MESSAGE, + number=531079, + message='NetworkEndpointGroupsAttachEndpointsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class AttachedDisk(proto.Message): + r"""An instance-attached disk resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + architecture (str): + [Output Only] The architecture of the attached disk. Valid + values are ARM64 or X86_64. Check the Architecture enum for + the list of possible values. + + This field is a member of `oneof`_ ``_architecture``. + auto_delete (bool): + Specifies whether the disk will be + auto-deleted when the instance is deleted (but + not when the disk is detached from the + instance). + + This field is a member of `oneof`_ ``_auto_delete``. + boot (bool): + Indicates that this is a boot disk. The + virtual machine will use the first partition of + the disk for its root filesystem. + + This field is a member of `oneof`_ ``_boot``. + device_name (str): + Specifies a unique device name of your choice that is + reflected into the /dev/disk/by-id/google-\* tree of a Linux + operating system running within the instance. 
This name can + be used to reference the device for mounting, resizing, and + so on, from within the instance. If not specified, the + server chooses a default device name to apply to this disk, + in the form persistent-disk-x, where x is a number assigned + by Google Compute Engine. This field is only applicable for + persistent disks. + + This field is a member of `oneof`_ ``_device_name``. + disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + Encrypts or decrypts a disk using a + customer-supplied encryption key. If you are + creating a new disk, this field encrypts the new + disk using an encryption key that you provide. + If you are attaching an existing disk that is + already encrypted, this field decrypts the disk + using the customer-supplied encryption key. If + you encrypt a disk using a customer-supplied + key, you must provide the same key again when + you attempt to use this resource at a later + time. For example, you must provide the key when + you create a snapshot or an image from the disk + or when you attach the disk to a virtual machine + instance. If you do not provide an encryption + key, then the disk will be encrypted using an + automatically generated key and you do not need + to provide a key to use the disk later. Instance + templates do not store customer-supplied + encryption keys, so you cannot use your own keys + to encrypt disks in a managed instance group. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + disk_size_gb (int): + The size of the disk in GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. + force_attach (bool): + [Input Only] Whether to force attach the regional disk even + if it's currently attached to another instance. If you try + to force attach a zonal disk to an instance, you will + receive an error. + + This field is a member of `oneof`_ ``_force_attach``. 
+ guest_os_features (MutableSequence[google.cloud.compute_v1.types.GuestOsFeature]): + A list of features to enable on the guest + operating system. Applicable only for bootable + images. Read Enabling guest operating system + features to see a list of available options. + index (int): + [Output Only] A zero-based index to this disk, where 0 is + reserved for the boot disk. If you have many disks attached + to an instance, each disk would have a unique index number. + + This field is a member of `oneof`_ ``_index``. + initialize_params (google.cloud.compute_v1.types.AttachedDiskInitializeParams): + [Input Only] Specifies the parameters for a new disk that + will be created alongside the new instance. Use + initialization parameters to create boot disks or local SSDs + attached to the new instance. This property is mutually + exclusive with the source property; you can only define one + or the other, but not both. + + This field is a member of `oneof`_ ``_initialize_params``. + interface (str): + Specifies the disk interface to use for + attaching this disk, which is either SCSI or + NVME. For most machine types, the default is + SCSI. Local SSDs can use either NVME or SCSI. In + certain configurations, persistent disks can use + NVMe. For more information, see About persistent + disks. Check the Interface enum for the list of + possible values. + + This field is a member of `oneof`_ ``_interface``. + kind (str): + [Output Only] Type of the resource. Always + compute#attachedDisk for attached disks. + + This field is a member of `oneof`_ ``_kind``. + licenses (MutableSequence[str]): + [Output Only] Any valid publicly visible licenses. + mode (str): + The mode in which to attach this disk, either READ_WRITE or + READ_ONLY. If not specified, the default is to attach the + disk in READ_WRITE mode. Check the Mode enum for the list of + possible values. + + This field is a member of `oneof`_ ``_mode``. 
        saved_state (str):
            For LocalSSD disks on VM Instances in STOPPED or SUSPENDED
            state, this field is set to PRESERVED if the LocalSSD data
            has been saved to a persistent location by customer request.
            (see the discard_local_ssd option on Stop/Suspend).
            Read-only in the api. Check the SavedState enum for the list
            of possible values.

            This field is a member of `oneof`_ ``_saved_state``.
        shielded_instance_initial_state (google.cloud.compute_v1.types.InitialStateConfig):
            [Output Only] shielded vm initial state stored on disk

            This field is a member of `oneof`_ ``_shielded_instance_initial_state``.
        source (str):
            Specifies a valid partial or full URL to an
            existing Persistent Disk resource. When creating
            a new instance, one of
            initializeParams.sourceImage or
            initializeParams.sourceSnapshot or disks.source
            is required except for local SSD. If desired,
            you can also attach existing non-root persistent
            disks using this property. This field is only
            applicable for persistent disks. Note that for
            InstanceTemplate, specify the disk name for
            zonal disk, and the URL for regional disk.

            This field is a member of `oneof`_ ``_source``.
        type_ (str):
            Specifies the type of the disk, either
            SCRATCH or PERSISTENT. If not specified, the
            default is PERSISTENT. Check the Type enum for
            the list of possible values.

            This field is a member of `oneof`_ ``_type``.
    """
    # The nested enums below list the string values the API accepts for the
    # corresponding str-typed fields. Enum numbers are fixed identifiers
    # from the generated API definition — never renumber them by hand.
    class Architecture(proto.Enum):
        r"""[Output Only] The architecture of the attached disk. Valid values
        are ARM64 or X86_64.

        Values:
            UNDEFINED_ARCHITECTURE (0):
                A value indicating that the enum field is not
                set.
            ARCHITECTURE_UNSPECIFIED (394750507):
                Default value indicating Architecture is not
                set.
            ARM64 (62547450):
                Machines with architecture ARM64
            X86_64 (425300551):
                Machines with architecture X86_64
        """
        UNDEFINED_ARCHITECTURE = 0
        ARCHITECTURE_UNSPECIFIED = 394750507
        ARM64 = 62547450
        X86_64 = 425300551

    class Interface(proto.Enum):
        r"""Specifies the disk interface to use for attaching this disk,
        which is either SCSI or NVME. For most machine types, the
        default is SCSI. Local SSDs can use either NVME or SCSI. In
        certain configurations, persistent disks can use NVMe. For more
        information, see About persistent disks.

        Values:
            UNDEFINED_INTERFACE (0):
                A value indicating that the enum field is not
                set.
            NVME (2408800):
                No description available.
            SCSI (2539686):
                No description available.
        """
        UNDEFINED_INTERFACE = 0
        NVME = 2408800
        SCSI = 2539686

    class Mode(proto.Enum):
        r"""The mode in which to attach this disk, either READ_WRITE or
        READ_ONLY. If not specified, the default is to attach the disk in
        READ_WRITE mode.

        Values:
            UNDEFINED_MODE (0):
                A value indicating that the enum field is not
                set.
            READ_ONLY (91950261):
                Attaches this disk in read-only mode.
                Multiple virtual machines can use a disk in
                read-only mode at a time.
            READ_WRITE (173607894):
                *[Default]* Attaches this disk in read-write mode. Only one
                virtual machine at a time can be attached to a disk in
                read-write mode.
        """
        UNDEFINED_MODE = 0
        READ_ONLY = 91950261
        READ_WRITE = 173607894

    class SavedState(proto.Enum):
        r"""For LocalSSD disks on VM Instances in STOPPED or SUSPENDED state,
        this field is set to PRESERVED if the LocalSSD data has been saved
        to a persistent location by customer request. (see the
        discard_local_ssd option on Stop/Suspend). Read-only in the api.

        Values:
            UNDEFINED_SAVED_STATE (0):
                A value indicating that the enum field is not
                set.
            DISK_SAVED_STATE_UNSPECIFIED (391290831):
                *[Default]* Disk state has not been preserved.
            PRESERVED (254159736):
                Disk state has been preserved.
        """
        UNDEFINED_SAVED_STATE = 0
        DISK_SAVED_STATE_UNSPECIFIED = 391290831
        PRESERVED = 254159736

    class Type(proto.Enum):
        r"""Specifies the type of the disk, either SCRATCH or PERSISTENT.
        If not specified, the default is PERSISTENT.

        Values:
            UNDEFINED_TYPE (0):
                A value indicating that the enum field is not
                set.
            PERSISTENT (460683927):
                No description available.
            SCRATCH (496778970):
                No description available.
        """
        UNDEFINED_TYPE = 0
        PERSISTENT = 460683927
        SCRATCH = 496778970

    # Field numbers are protobuf wire tags; `optional=True` marks proto3
    # explicit-presence fields (the synthetic one-field oneofs above).
    architecture: str = proto.Field(
        proto.STRING,
        number=302803283,
        optional=True,
    )
    auto_delete: bool = proto.Field(
        proto.BOOL,
        number=464761403,
        optional=True,
    )
    boot: bool = proto.Field(
        proto.BOOL,
        number=3029746,
        optional=True,
    )
    device_name: str = proto.Field(
        proto.STRING,
        number=67541716,
        optional=True,
    )
    disk_encryption_key: 'CustomerEncryptionKey' = proto.Field(
        proto.MESSAGE,
        number=271660677,
        optional=True,
        message='CustomerEncryptionKey',
    )
    disk_size_gb: int = proto.Field(
        proto.INT64,
        number=316263735,
        optional=True,
    )
    force_attach: bool = proto.Field(
        proto.BOOL,
        number=142758425,
        optional=True,
    )
    guest_os_features: MutableSequence['GuestOsFeature'] = proto.RepeatedField(
        proto.MESSAGE,
        number=79294545,
        message='GuestOsFeature',
    )
    index: int = proto.Field(
        proto.INT32,
        number=100346066,
        optional=True,
    )
    initialize_params: 'AttachedDiskInitializeParams' = proto.Field(
        proto.MESSAGE,
        number=17697045,
        optional=True,
        message='AttachedDiskInitializeParams',
    )
    interface: str = proto.Field(
        proto.STRING,
        number=502623545,
        optional=True,
    )
    kind: str = proto.Field(
        proto.STRING,
        number=3292052,
        optional=True,
    )
    licenses: MutableSequence[str] = proto.RepeatedField(
        proto.STRING,
        number=337642578,
    )
    mode: str = proto.Field(
        proto.STRING,
        number=3357091,
        optional=True,
    )
    saved_state: str = proto.Field(
        proto.STRING,
        number=411587801,
        optional=True,
    )
    shielded_instance_initial_state: 'InitialStateConfig' = proto.Field(
        proto.MESSAGE,
        number=192356867,
        optional=True,
        message='InitialStateConfig',
    )
    source: str = proto.Field(
        proto.STRING,
        number=177235995,
        optional=True,
    )
    # Trailing underscore avoids shadowing the builtin `type`; the wire name
    # is still "type" (oneof ``_type`` above).
    type_: str = proto.Field(
        proto.STRING,
        number=3575610,
        optional=True,
    )


class AttachedDiskInitializeParams(proto.Message):
    r"""[Input Only] Specifies the parameters for a new disk that will be
    created alongside the new instance. Use initialization parameters to
    create boot disks or local SSDs attached to the new instance. This
    field is persisted and returned for instanceTemplate and not
    returned in the context of instance. This property is mutually
    exclusive with the source property; you can only define one or the
    other, but not both.


    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        architecture (str):
            The architecture of the attached disk. Valid values are
            arm64 or x86_64. Check the Architecture enum for the list of
            possible values.

            This field is a member of `oneof`_ ``_architecture``.
        description (str):
            An optional description. Provide this
            property when creating the disk.

            This field is a member of `oneof`_ ``_description``.
        disk_name (str):
            Specifies the disk name. If not specified,
            the default is to use the name of the instance.
            If a disk with the same name already exists in
            the given region, the existing disk is attached
            to the new instance and the new disk is not
            created.

            This field is a member of `oneof`_ ``_disk_name``.
        disk_size_gb (int):
            Specifies the size of the disk in base-2 GB.
            The size must be at least 10 GB. If you specify
            a sourceImage, which is required for boot disks,
            the default size is the size of the sourceImage.
+ If you do not specify a sourceImage, the default + disk size is 500 GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. + disk_type (str): + Specifies the disk type to use to create the + instance. If not specified, the default is + pd-standard, specified using the full URL. For + example: + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /diskTypes/pd-standard For a full list of + acceptable values, see Persistent disk types. If + you specify this field when creating a VM, you + can provide either the full or partial URL. For + example, the following values are valid: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /diskTypes/diskType - + projects/project/zones/zone/diskTypes/diskType - + zones/zone/diskTypes/diskType If you specify + this field when creating or updating an instance + template or all-instances configuration, specify + the type of the disk, not the URL. For example: + pd-standard. + + This field is a member of `oneof`_ ``_disk_type``. + labels (MutableMapping[str, str]): + Labels to apply to this disk. These can be + later modified by the disks.setLabels method. + This field is only applicable for persistent + disks. + licenses (MutableSequence[str]): + A list of publicly visible licenses. Reserved + for Google's use. + on_update_action (str): + Specifies which action to take on instance + update with this disk. Default is to use the + existing disk. Check the OnUpdateAction enum for + the list of possible values. + + This field is a member of `oneof`_ ``_on_update_action``. + provisioned_iops (int): + Indicates how many IOPS to provision for the + disk. This sets the number of I/O operations per + second that the disk can handle. Values must be + between 10,000 and 120,000. For more details, + see the Extreme persistent disk documentation. + + This field is a member of `oneof`_ ``_provisioned_iops``. + provisioned_throughput (int): + Indicates how much throughput to provision + for the disk. 
This sets the number of throughput + mb per second that the disk can handle. Values + must be between 1 and 7,124. + + This field is a member of `oneof`_ ``_provisioned_throughput``. + replica_zones (MutableSequence[str]): + Required for each regional disk associated + with the instance. Specify the URLs of the zones + where the disk should be replicated to. You must + provide exactly two replica zones, and one zone + must be the same as the instance zone. You can't + use this option with boot disks. + resource_manager_tags (MutableMapping[str, str]): + Resource manager tags to be bound to the disk. Tag keys and + values have the same definition as resource manager tags. + Keys must be in the format ``tagKeys/{tag_key_id}``, and + values are in the format ``tagValues/456``. The field is + ignored (both PUT & PATCH) when empty. + resource_policies (MutableSequence[str]): + Resource policies applied to this disk for + automatic snapshot creations. Specified using + the full or partial URL. For instance template, + specify only the resource policy name. + source_image (str): + The source image to create this disk. When + creating a new instance, one of + initializeParams.sourceImage or + initializeParams.sourceSnapshot or disks.source + is required except for local SSD. To create a + disk with one of the public operating system + images, specify the image by its family name. + For example, specify family/debian-9 to use the + latest Debian 9 image: + projects/debian-cloud/global/images/family/debian-9 + Alternatively, use a specific version of a + public operating system image: + projects/debian-cloud/global/images/debian-9-stretch-vYYYYMMDD + To create a disk with a custom image that you + created, specify the image name in the following + format: global/images/my-custom-image You can + also specify a custom image by its image family, + which returns the latest version of the image in + that family. 
Replace the image name with + family/family-name: + global/images/family/my-image-family If the + source image is deleted later, this field will + not be set. + + This field is a member of `oneof`_ ``_source_image``. + source_image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source image. Required if the source image is + protected by a customer-supplied encryption key. + InstanceTemplate and InstancePropertiesPatch do + not store customer-supplied encryption keys, so + you cannot create disks for instances in a + managed instance group if the source images are + encrypted with your own keys. + + This field is a member of `oneof`_ ``_source_image_encryption_key``. + source_snapshot (str): + The source snapshot to create this disk. When + creating a new instance, one of + initializeParams.sourceSnapshot or + initializeParams.sourceImage or disks.source is + required except for local SSD. To create a disk + with a snapshot that you created, specify the + snapshot name in the following format: + global/snapshots/my-backup If the source + snapshot is deleted later, this field will not + be set. + + This field is a member of `oneof`_ ``_source_snapshot``. + source_snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source snapshot. + + This field is a member of `oneof`_ ``_source_snapshot_encryption_key``. + """ + class Architecture(proto.Enum): + r"""The architecture of the attached disk. Valid values are arm64 or + x86_64. + + Values: + UNDEFINED_ARCHITECTURE (0): + A value indicating that the enum field is not + set. + ARCHITECTURE_UNSPECIFIED (394750507): + Default value indicating Architecture is not + set. 
+ ARM64 (62547450): + Machines with architecture ARM64 + X86_64 (425300551): + Machines with architecture X86_64 + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + + class OnUpdateAction(proto.Enum): + r"""Specifies which action to take on instance update with this + disk. Default is to use the existing disk. + + Values: + UNDEFINED_ON_UPDATE_ACTION (0): + A value indicating that the enum field is not + set. + RECREATE_DISK (494767853): + Always recreate the disk. + RECREATE_DISK_IF_SOURCE_CHANGED (398099712): + Recreate the disk if source (image, snapshot) + of this disk is different from source of + existing disk. + USE_EXISTING_DISK (232682233): + Use the existing disk, this is the default + behaviour. + """ + UNDEFINED_ON_UPDATE_ACTION = 0 + RECREATE_DISK = 494767853 + RECREATE_DISK_IF_SOURCE_CHANGED = 398099712 + USE_EXISTING_DISK = 232682233 + + architecture: str = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disk_name: str = proto.Field( + proto.STRING, + number=92807149, + optional=True, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=316263735, + optional=True, + ) + disk_type: str = proto.Field( + proto.STRING, + number=93009052, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + licenses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=337642578, + ) + on_update_action: str = proto.Field( + proto.STRING, + number=202451980, + optional=True, + ) + provisioned_iops: int = proto.Field( + proto.INT64, + number=186769108, + optional=True, + ) + provisioned_throughput: int = proto.Field( + proto.INT64, + number=526524181, + optional=True, + ) + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=48438272, + ) + resource_manager_tags: 
MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=377671164, + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + source_image: str = proto.Field( + proto.STRING, + number=50443319, + optional=True, + ) + source_image_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=381503659, + optional=True, + message='CustomerEncryptionKey', + ) + source_snapshot: str = proto.Field( + proto.STRING, + number=126061928, + optional=True, + ) + source_snapshot_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=303679322, + optional=True, + message='CustomerEncryptionKey', + ) + + +class AuditConfig(proto.Message): + r"""Specifies the audit configuration for a service. The configuration + determines which permission types are logged, and what identities, + if any, are exempted from logging. An AuditConfig must have one or + more AuditLogConfigs. If there are AuditConfigs for both + ``allServices`` and a specific service, the union of the two + AuditConfigs is used for that service: the log_types specified in + each AuditConfig are enabled, and the exempted_members in each + AuditLogConfig are exempted. Example Policy with multiple + AuditConfigs: { "audit_configs": [ { "service": "allServices", + "audit_log_configs": [ { "log_type": "DATA_READ", + "exempted_members": [ "user:jose@example.com" ] }, { "log_type": + "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": + "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": + "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ + "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy + enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also + exempts jose@example.com from DATA_READ logging, and + aliya@example.com from DATA_WRITE logging. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audit_log_configs (MutableSequence[google.cloud.compute_v1.types.AuditLogConfig]): + The configuration for logging of each type of + permission. + exempted_members (MutableSequence[str]): + This is deprecated and has no effect. Do not + use. + service (str): + Specifies a service that will be enabled for audit logging. + For example, ``storage.googleapis.com``, + ``cloudsql.googleapis.com``. ``allServices`` is a special + value that covers all services. + + This field is a member of `oneof`_ ``_service``. + """ + + audit_log_configs: MutableSequence['AuditLogConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=488420626, + message='AuditLogConfig', + ) + exempted_members: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=232615576, + ) + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + + +class AuditLogConfig(proto.Message): + r"""Provides the configuration for logging a type of permissions. + Example: { "audit_log_configs": [ { "log_type": "DATA_READ", + "exempted_members": [ "user:jose@example.com" ] }, { "log_type": + "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' + logging, while exempting jose@example.com from DATA_READ logging. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + exempted_members (MutableSequence[str]): + Specifies the identities that do not cause + logging for this type of permission. Follows the + same format of Binding.members. + ignore_child_exemptions (bool): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_ignore_child_exemptions``. + log_type (str): + The log type that this config enables. + Check the LogType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_log_type``. 
+ """ + class LogType(proto.Enum): + r"""The log type that this config enables. + + Values: + UNDEFINED_LOG_TYPE (0): + A value indicating that the enum field is not + set. + ADMIN_READ (128951462): + Admin reads. Example: CloudIAM getIamPolicy + DATA_READ (305224971): + Data reads. Example: CloudSQL Users list + DATA_WRITE (340181738): + Data writes. Example: CloudSQL Users create + LOG_TYPE_UNSPECIFIED (154527053): + Default case. Should never be this. + """ + UNDEFINED_LOG_TYPE = 0 + ADMIN_READ = 128951462 + DATA_READ = 305224971 + DATA_WRITE = 340181738 + LOG_TYPE_UNSPECIFIED = 154527053 + + exempted_members: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=232615576, + ) + ignore_child_exemptions: bool = proto.Field( + proto.BOOL, + number=70141850, + optional=True, + ) + log_type: str = proto.Field( + proto.STRING, + number=403115861, + optional=True, + ) + + +class AuthorizationLoggingOptions(proto.Message): + r"""This is deprecated and has no effect. Do not use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + permission_type (str): + This is deprecated and has no effect. Do not + use. Check the PermissionType enum for the list + of possible values. + + This field is a member of `oneof`_ ``_permission_type``. + """ + class PermissionType(proto.Enum): + r"""This is deprecated and has no effect. Do not use. + + Values: + UNDEFINED_PERMISSION_TYPE (0): + A value indicating that the enum field is not + set. + ADMIN_READ (128951462): + This is deprecated and has no effect. Do not + use. + ADMIN_WRITE (244412079): + This is deprecated and has no effect. Do not + use. + DATA_READ (305224971): + This is deprecated and has no effect. Do not + use. + DATA_WRITE (340181738): + This is deprecated and has no effect. Do not + use. + PERMISSION_TYPE_UNSPECIFIED (440313346): + This is deprecated and has no effect. Do not + use. 
+ """ + UNDEFINED_PERMISSION_TYPE = 0 + ADMIN_READ = 128951462 + ADMIN_WRITE = 244412079 + DATA_READ = 305224971 + DATA_WRITE = 340181738 + PERMISSION_TYPE_UNSPECIFIED = 440313346 + + permission_type: str = proto.Field( + proto.STRING, + number=525978538, + optional=True, + ) + + +class Autoscaler(proto.Message): + r"""Represents an Autoscaler resource. Google Compute Engine has two + Autoscaler resources: \* + `Zonal `__ \* + `Regional `__ Use + autoscalers to automatically add or delete instances from a managed + instance group according to your defined autoscaling policy. For + more information, read Autoscaling Groups of Instances. For zonal + managed instance groups resource, use the autoscaler resource. For + regional managed instance groups, use the regionAutoscalers + resource. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaling_policy (google.cloud.compute_v1.types.AutoscalingPolicy): + The configuration parameters for the + autoscaling algorithm. You can define one or + more signals for an autoscaler: cpuUtilization, + customMetricUtilizations, and + loadBalancingUtilization. If none of these are + specified, the default will be to autoscale + based on cpuUtilization to 0.6 or 60%. + + This field is a member of `oneof`_ ``_autoscaling_policy``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#autoscaler for autoscalers. 
+ + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + recommended_size (int): + [Output Only] Target recommended MIG size (number of + instances) computed by autoscaler. Autoscaler calculates the + recommended MIG size even when the autoscaling policy mode + is different from ON. This field is empty when autoscaler is + not connected to an existing managed instance group or + autoscaler did not generate its prediction. + + This field is a member of `oneof`_ ``_recommended_size``. + region (str): + [Output Only] URL of the region where the instance group + resides (for autoscalers living in regional scope). + + This field is a member of `oneof`_ ``_region``. + scaling_schedule_status (MutableMapping[str, google.cloud.compute_v1.types.ScalingScheduleStatus]): + [Output Only] Status information of existing scaling + schedules. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + status (str): + [Output Only] The status of the autoscaler configuration. + Current set of possible values: - PENDING: Autoscaler + backend hasn't read new/updated configuration. - DELETING: + Configuration is being deleted. - ACTIVE: Configuration is + acknowledged to be effective. Some warnings might be present + in the statusDetails field. - ERROR: Configuration has + errors. Actionable for users. Details are present in the + statusDetails field. New values might be added in the + future. 
Check the Status enum for the list of possible + values. + + This field is a member of `oneof`_ ``_status``. + status_details (MutableSequence[google.cloud.compute_v1.types.AutoscalerStatusDetails]): + [Output Only] Human-readable details about the current state + of the autoscaler. Read the documentation for Commonly + returned status messages for examples of status messages you + might encounter. + target (str): + URL of the managed instance group that this + autoscaler will scale. This field is required + when creating an autoscaler. + + This field is a member of `oneof`_ ``_target``. + zone (str): + [Output Only] URL of the zone where the instance group + resides (for autoscalers living in zonal scope). + + This field is a member of `oneof`_ ``_zone``. + """ + class Status(proto.Enum): + r"""[Output Only] The status of the autoscaler configuration. Current + set of possible values: - PENDING: Autoscaler backend hasn't read + new/updated configuration. - DELETING: Configuration is being + deleted. - ACTIVE: Configuration is acknowledged to be effective. + Some warnings might be present in the statusDetails field. - ERROR: + Configuration has errors. Actionable for users. Details are present + in the statusDetails field. New values might be added in the future. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + Configuration is acknowledged to be effective + DELETING (528602024): + Configuration is being deleted + ERROR (66247144): + Configuration has errors. Actionable for + users. 
+ PENDING (35394935): + Autoscaler backend hasn't read new/updated + configuration + """ + UNDEFINED_STATUS = 0 + ACTIVE = 314733318 + DELETING = 528602024 + ERROR = 66247144 + PENDING = 35394935 + + autoscaling_policy: 'AutoscalingPolicy' = proto.Field( + proto.MESSAGE, + number=221950041, + optional=True, + message='AutoscalingPolicy', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + recommended_size: int = proto.Field( + proto.INT32, + number=257915749, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + scaling_schedule_status: MutableMapping[str, 'ScalingScheduleStatus'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=465950178, + message='ScalingScheduleStatus', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + status_details: MutableSequence['AutoscalerStatusDetails'] = proto.RepeatedField( + proto.MESSAGE, + number=363353845, + message='AutoscalerStatusDetails', + ) + target: str = proto.Field( + proto.STRING, + number=192835985, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class AutoscalerAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. 
+ items (MutableMapping[str, google.cloud.compute_v1.types.AutoscalersScopedList]): + A list of AutoscalersScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#autoscalerAggregatedList for aggregated lists of + autoscalers. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. end_interface: + MixerListResponseWithEtagBuilder + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'AutoscalersScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='AutoscalersScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class AutoscalerList(proto.Message): + r"""Contains a list of Autoscaler resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Autoscaler]): + A list of Autoscaler resources. + kind (str): + [Output Only] Type of resource. Always + compute#autoscalerList for lists of autoscalers. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Autoscaler'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Autoscaler', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class AutoscalerStatusDetails(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + message (str): + The status message. + + This field is a member of `oneof`_ ``_message``. + type_ (str): + The type of error, warning, or notice returned. Current set + of possible values: - ALL_INSTANCES_UNHEALTHY (WARNING): All + instances in the instance group are unhealthy (not in + RUNNING state). - BACKEND_SERVICE_DOES_NOT_EXIST (ERROR): + There is no backend service attached to the instance group. + - CAPPED_AT_MAX_NUM_REPLICAS (WARNING): Autoscaler + recommends a size greater than maxNumReplicas. - + CUSTOM_METRIC_DATA_POINTS_TOO_SPARSE (WARNING): The custom + metric samples are not exported often enough to be a + credible base for autoscaling. - CUSTOM_METRIC_INVALID + (ERROR): The custom metric that was specified does not exist + or does not have the necessary labels. - MIN_EQUALS_MAX + (WARNING): The minNumReplicas is equal to maxNumReplicas. + This means the autoscaler cannot add or remove instances + from the instance group. 
- MISSING_CUSTOM_METRIC_DATA_POINTS + (WARNING): The autoscaler did not receive any data from the + custom metric configured for autoscaling. - + MISSING_LOAD_BALANCING_DATA_POINTS (WARNING): The autoscaler + is configured to scale based on a load balancing signal but + the instance group has not received any requests from the + load balancer. - MODE_OFF (WARNING): Autoscaling is turned + off. The number of instances in the group won't change + automatically. The autoscaling configuration is preserved. - + MODE_ONLY_UP (WARNING): Autoscaling is in the "Autoscale + only out" mode. The autoscaler can add instances but not + remove any. - MORE_THAN_ONE_BACKEND_SERVICE (ERROR): The + instance group cannot be autoscaled because it has more than + one backend service attached to it. - + NOT_ENOUGH_QUOTA_AVAILABLE (ERROR): There is insufficient + quota for the necessary resources, such as CPU or number of + instances. - REGION_RESOURCE_STOCKOUT (ERROR): Shown only + for regional autoscalers: there is a resource stockout in + the chosen region. - SCALING_TARGET_DOES_NOT_EXIST (ERROR): + The target to be scaled does not exist. - + UNSUPPORTED_MAX_RATE_LOAD_BALANCING_CONFIGURATION (ERROR): + Autoscaling does not work with an HTTP/S load balancer that + has been configured for maxRate. - ZONE_RESOURCE_STOCKOUT + (ERROR): For zonal autoscalers: there is a resource stockout + in the chosen zone. For regional autoscalers: in at least + one of the zones you're using there is a resource stockout. + New values might be added in the future. Some of the values + might not be available in all API versions. Check the Type + enum for the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""The type of error, warning, or notice returned. Current set of + possible values: - ALL_INSTANCES_UNHEALTHY (WARNING): All instances + in the instance group are unhealthy (not in RUNNING state). 
- + BACKEND_SERVICE_DOES_NOT_EXIST (ERROR): There is no backend service + attached to the instance group. - CAPPED_AT_MAX_NUM_REPLICAS + (WARNING): Autoscaler recommends a size greater than maxNumReplicas. + - CUSTOM_METRIC_DATA_POINTS_TOO_SPARSE (WARNING): The custom metric + samples are not exported often enough to be a credible base for + autoscaling. - CUSTOM_METRIC_INVALID (ERROR): The custom metric that + was specified does not exist or does not have the necessary labels. + - MIN_EQUALS_MAX (WARNING): The minNumReplicas is equal to + maxNumReplicas. This means the autoscaler cannot add or remove + instances from the instance group. - + MISSING_CUSTOM_METRIC_DATA_POINTS (WARNING): The autoscaler did not + receive any data from the custom metric configured for autoscaling. + - MISSING_LOAD_BALANCING_DATA_POINTS (WARNING): The autoscaler is + configured to scale based on a load balancing signal but the + instance group has not received any requests from the load balancer. + - MODE_OFF (WARNING): Autoscaling is turned off. The number of + instances in the group won't change automatically. The autoscaling + configuration is preserved. - MODE_ONLY_UP (WARNING): Autoscaling is + in the "Autoscale only out" mode. The autoscaler can add instances + but not remove any. - MORE_THAN_ONE_BACKEND_SERVICE (ERROR): The + instance group cannot be autoscaled because it has more than one + backend service attached to it. - NOT_ENOUGH_QUOTA_AVAILABLE + (ERROR): There is insufficient quota for the necessary resources, + such as CPU or number of instances. - REGION_RESOURCE_STOCKOUT + (ERROR): Shown only for regional autoscalers: there is a resource + stockout in the chosen region. - SCALING_TARGET_DOES_NOT_EXIST + (ERROR): The target to be scaled does not exist. - + UNSUPPORTED_MAX_RATE_LOAD_BALANCING_CONFIGURATION (ERROR): + Autoscaling does not work with an HTTP/S load balancer that has been + configured for maxRate. 
- ZONE_RESOURCE_STOCKOUT (ERROR): For zonal + autoscalers: there is a resource stockout in the chosen zone. For + regional autoscalers: in at least one of the zones you're using + there is a resource stockout. New values might be added in the + future. Some of the values might not be available in all API + versions. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + ALL_INSTANCES_UNHEALTHY (404965477): + All instances in the instance group are + unhealthy (not in RUNNING state). + BACKEND_SERVICE_DOES_NOT_EXIST (191417626): + There is no backend service attached to the + instance group. + CAPPED_AT_MAX_NUM_REPLICAS (518617): + Autoscaler recommends a size greater than + maxNumReplicas. + CUSTOM_METRIC_DATA_POINTS_TOO_SPARSE (328964659): + The custom metric samples are not exported + often enough to be a credible base for + autoscaling. + CUSTOM_METRIC_INVALID (204430550): + The custom metric that was specified does not + exist or does not have the necessary labels. + MIN_EQUALS_MAX (2821361): + The minNumReplicas is equal to + maxNumReplicas. This means the autoscaler cannot + add or remove instances from the instance group. + MISSING_CUSTOM_METRIC_DATA_POINTS (94885086): + The autoscaler did not receive any data from + the custom metric configured for autoscaling. + MISSING_LOAD_BALANCING_DATA_POINTS (509858898): + The autoscaler is configured to scale based + on a load balancing signal but the instance + group has not received any requests from the + load balancer. + MODE_OFF (164169907): + Autoscaling is turned off. The number of + instances in the group won't change + automatically. The autoscaling configuration is + preserved. + MODE_ONLY_SCALE_OUT (3840994): + Autoscaling is in the "Autoscale only scale + out" mode. Instances in the group will be only + added. + MODE_ONLY_UP (100969842): + Autoscaling is in the "Autoscale only out" + mode. Instances in the group will be only added. 
+ MORE_THAN_ONE_BACKEND_SERVICE (151922141): + The instance group cannot be autoscaled + because it has more than one backend service + attached to it. + NOT_ENOUGH_QUOTA_AVAILABLE (403101631): + There is insufficient quota for the necessary + resources, such as CPU or number of instances. + REGION_RESOURCE_STOCKOUT (528622846): + Showed only for regional autoscalers: there + is a resource stockout in the chosen region. + SCALING_TARGET_DOES_NOT_EXIST (122636699): + The target to be scaled does not exist. + SCHEDULED_INSTANCES_GREATER_THAN_AUTOSCALER_MAX (29275586): + For some scaling schedules + minRequiredReplicas is greater than + maxNumReplicas. Autoscaler always recommends at + most maxNumReplicas instances. + SCHEDULED_INSTANCES_LESS_THAN_AUTOSCALER_MIN (398287669): + For some scaling schedules + minRequiredReplicas is less than minNumReplicas. + Autoscaler always recommends at least + minNumReplicas instances. + UNKNOWN (433141802): + No description available. + UNSUPPORTED_MAX_RATE_LOAD_BALANCING_CONFIGURATION (330845009): + Autoscaling does not work with an HTTP/S load + balancer that has been configured for maxRate. + ZONE_RESOURCE_STOCKOUT (210200502): + For zonal autoscalers: there is a resource + stockout in the chosen zone. For regional + autoscalers: in at least one of the zones you're + using there is a resource stockout. 
+ """ + UNDEFINED_TYPE = 0 + ALL_INSTANCES_UNHEALTHY = 404965477 + BACKEND_SERVICE_DOES_NOT_EXIST = 191417626 + CAPPED_AT_MAX_NUM_REPLICAS = 518617 + CUSTOM_METRIC_DATA_POINTS_TOO_SPARSE = 328964659 + CUSTOM_METRIC_INVALID = 204430550 + MIN_EQUALS_MAX = 2821361 + MISSING_CUSTOM_METRIC_DATA_POINTS = 94885086 + MISSING_LOAD_BALANCING_DATA_POINTS = 509858898 + MODE_OFF = 164169907 + MODE_ONLY_SCALE_OUT = 3840994 + MODE_ONLY_UP = 100969842 + MORE_THAN_ONE_BACKEND_SERVICE = 151922141 + NOT_ENOUGH_QUOTA_AVAILABLE = 403101631 + REGION_RESOURCE_STOCKOUT = 528622846 + SCALING_TARGET_DOES_NOT_EXIST = 122636699 + SCHEDULED_INSTANCES_GREATER_THAN_AUTOSCALER_MAX = 29275586 + SCHEDULED_INSTANCES_LESS_THAN_AUTOSCALER_MIN = 398287669 + UNKNOWN = 433141802 + UNSUPPORTED_MAX_RATE_LOAD_BALANCING_CONFIGURATION = 330845009 + ZONE_RESOURCE_STOCKOUT = 210200502 + + message: str = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class AutoscalersScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscalers (MutableSequence[google.cloud.compute_v1.types.Autoscaler]): + [Output Only] A list of autoscalers contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of autoscalers when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + autoscalers: MutableSequence['Autoscaler'] = proto.RepeatedField( + proto.MESSAGE, + number=465771644, + message='Autoscaler', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class AutoscalingPolicy(proto.Message): + r"""Cloud Autoscaler policy. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cool_down_period_sec (int): + The number of seconds that your application takes to + initialize on a VM instance. This is referred to as the + `initialization + period `__. + Specifying an accurate initialization period improves + autoscaler decisions. For example, when scaling out, the + autoscaler ignores data from VMs that are still initializing + because those VMs might not yet represent normal usage of + your application. The default initialization period is 60 + seconds. Initialization periods might vary because of + numerous factors. We recommend that you test how long your + application takes to initialize. To do this, create a VM and + time your application's startup process. + + This field is a member of `oneof`_ ``_cool_down_period_sec``. + cpu_utilization (google.cloud.compute_v1.types.AutoscalingPolicyCpuUtilization): + Defines the CPU utilization policy that + allows the autoscaler to scale based on the + average CPU utilization of a managed instance + group. + + This field is a member of `oneof`_ ``_cpu_utilization``. + custom_metric_utilizations (MutableSequence[google.cloud.compute_v1.types.AutoscalingPolicyCustomMetricUtilization]): + Configuration parameters of autoscaling based + on a custom metric. + load_balancing_utilization (google.cloud.compute_v1.types.AutoscalingPolicyLoadBalancingUtilization): + Configuration parameters of autoscaling based + on load balancer. + + This field is a member of `oneof`_ ``_load_balancing_utilization``. + max_num_replicas (int): + The maximum number of instances that the + autoscaler can scale out to. This is required + when creating or updating an autoscaler. The + maximum number of replicas must not be lower + than minimal number of replicas. + + This field is a member of `oneof`_ ``_max_num_replicas``. 
+ min_num_replicas (int): + The minimum number of replicas that the + autoscaler can scale in to. This cannot be less + than 0. If not provided, autoscaler chooses a + default value depending on maximum number of + instances allowed. + + This field is a member of `oneof`_ ``_min_num_replicas``. + mode (str): + Defines the operating mode for this policy. The following + modes are available: - OFF: Disables the autoscaler but + maintains its configuration. - ONLY_SCALE_OUT: Restricts the + autoscaler to add VM instances only. - ON: Enables all + autoscaler activities according to its policy. For more + information, see "Turning off or restricting an autoscaler" + Check the Mode enum for the list of possible values. + + This field is a member of `oneof`_ ``_mode``. + scale_in_control (google.cloud.compute_v1.types.AutoscalingPolicyScaleInControl): + + This field is a member of `oneof`_ ``_scale_in_control``. + scaling_schedules (MutableMapping[str, google.cloud.compute_v1.types.AutoscalingPolicyScalingSchedule]): + Scaling schedules defined for an autoscaler. Multiple + schedules can be set on an autoscaler, and they can overlap. + During overlapping periods the greatest + min_required_replicas of all scaling schedules is applied. + Up to 128 scaling schedules are allowed. + """ + class Mode(proto.Enum): + r"""Defines the operating mode for this policy. The following modes are + available: - OFF: Disables the autoscaler but maintains its + configuration. - ONLY_SCALE_OUT: Restricts the autoscaler to add VM + instances only. - ON: Enables all autoscaler activities according to + its policy. For more information, see "Turning off or restricting an + autoscaler" + + Values: + UNDEFINED_MODE (0): + A value indicating that the enum field is not + set. + OFF (78159): + Do not automatically scale the MIG in or out. The + recommended_size field contains the size of MIG that would + be set if the actuation mode was enabled. 
+ ON (2527): + Automatically scale the MIG in and out + according to the policy. + ONLY_SCALE_OUT (152713670): + Automatically create VMs according to the + policy, but do not scale the MIG in. + ONLY_UP (478095374): + Automatically create VMs according to the + policy, but do not scale the MIG in. + """ + UNDEFINED_MODE = 0 + OFF = 78159 + ON = 2527 + ONLY_SCALE_OUT = 152713670 + ONLY_UP = 478095374 + + cool_down_period_sec: int = proto.Field( + proto.INT32, + number=107692954, + optional=True, + ) + cpu_utilization: 'AutoscalingPolicyCpuUtilization' = proto.Field( + proto.MESSAGE, + number=381211147, + optional=True, + message='AutoscalingPolicyCpuUtilization', + ) + custom_metric_utilizations: MutableSequence['AutoscalingPolicyCustomMetricUtilization'] = proto.RepeatedField( + proto.MESSAGE, + number=131972850, + message='AutoscalingPolicyCustomMetricUtilization', + ) + load_balancing_utilization: 'AutoscalingPolicyLoadBalancingUtilization' = proto.Field( + proto.MESSAGE, + number=429746403, + optional=True, + message='AutoscalingPolicyLoadBalancingUtilization', + ) + max_num_replicas: int = proto.Field( + proto.INT32, + number=62327375, + optional=True, + ) + min_num_replicas: int = proto.Field( + proto.INT32, + number=535329825, + optional=True, + ) + mode: str = proto.Field( + proto.STRING, + number=3357091, + optional=True, + ) + scale_in_control: 'AutoscalingPolicyScaleInControl' = proto.Field( + proto.MESSAGE, + number=527670872, + optional=True, + message='AutoscalingPolicyScaleInControl', + ) + scaling_schedules: MutableMapping[str, 'AutoscalingPolicyScalingSchedule'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=355416580, + message='AutoscalingPolicyScalingSchedule', + ) + + +class AutoscalingPolicyCpuUtilization(proto.Message): + r"""CPU utilization policy. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + predictive_method (str): + Indicates whether predictive autoscaling based on CPU metric + is enabled. Valid values are: \* NONE (default). No + predictive method is used. The autoscaler scales the group + to meet current demand based on real-time metrics. \* + OPTIMIZE_AVAILABILITY. Predictive autoscaling improves + availability by monitoring daily and weekly load patterns + and scaling out ahead of anticipated demand. Check the + PredictiveMethod enum for the list of possible values. + + This field is a member of `oneof`_ ``_predictive_method``. + utilization_target (float): + The target CPU utilization that the autoscaler maintains. + Must be a float value in the range (0, 1]. If not specified, + the default is 0.6. If the CPU level is below the target + utilization, the autoscaler scales in the number of + instances until it reaches the minimum number of instances + you specified or until the average CPU of your instances + reaches the target utilization. If the average CPU is above + the target utilization, the autoscaler scales out until it + reaches the maximum number of instances you specified or + until the average utilization reaches the target + utilization. + + This field is a member of `oneof`_ ``_utilization_target``. + """ + class PredictiveMethod(proto.Enum): + r"""Indicates whether predictive autoscaling based on CPU metric is + enabled. Valid values are: \* NONE (default). No predictive method + is used. The autoscaler scales the group to meet current demand + based on real-time metrics. \* OPTIMIZE_AVAILABILITY. Predictive + autoscaling improves availability by monitoring daily and weekly + load patterns and scaling out ahead of anticipated demand. + + Values: + UNDEFINED_PREDICTIVE_METHOD (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No predictive method is used. 
The autoscaler + scales the group to meet current demand based on + real-time metrics + OPTIMIZE_AVAILABILITY (11629437): + Predictive autoscaling improves availability + by monitoring daily and weekly load patterns and + scaling out ahead of anticipated demand. + """ + UNDEFINED_PREDICTIVE_METHOD = 0 + NONE = 2402104 + OPTIMIZE_AVAILABILITY = 11629437 + + predictive_method: str = proto.Field( + proto.STRING, + number=390220737, + optional=True, + ) + utilization_target: float = proto.Field( + proto.DOUBLE, + number=215905870, + optional=True, + ) + + +class AutoscalingPolicyCustomMetricUtilization(proto.Message): + r"""Custom utilization metric policy. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter string, compatible with a Stackdriver Monitoring + filter string for TimeSeries.list API call. This filter is + used to select a specific TimeSeries for the purpose of + autoscaling and to determine whether the metric is exporting + per-instance or per-group data. For the filter to be valid + for autoscaling purposes, the following rules apply: - You + can only use the AND operator for joining selectors. - You + can only use direct equality comparison operator (=) without + any functions for each selector. - You can specify the + metric in both the filter string and in the metric field. + However, if specified in both places, the metric must be + identical. - The monitored resource type determines what + kind of values are expected for the metric. If it is a + gce_instance, the autoscaler expects the metric to include a + separate TimeSeries for each instance in a group. In such a + case, you cannot filter on resource labels. 
If the resource + type is any other value, the autoscaler expects this metric + to contain values that apply to the entire autoscaled + instance group and resource label filtering can be performed + to point autoscaler at the correct TimeSeries to scale upon. + This is called a *per-group metric* for the purpose of + autoscaling. If not specified, the type defaults to + gce_instance. Try to provide a filter that is selective + enough to pick just one TimeSeries for the autoscaled group + or for each of the instances (if you are using gce_instance + resource type). If multiple TimeSeries are returned upon the + query execution, the autoscaler will sum their respective + values to obtain its scaling value. + + This field is a member of `oneof`_ ``_filter``. + metric (str): + The identifier (type) of the Stackdriver + Monitoring metric. The metric cannot have + negative values. The metric must have a value + type of INT64 or DOUBLE. + + This field is a member of `oneof`_ ``_metric``. + single_instance_assignment (float): + If scaling is based on a per-group metric value that + represents the total amount of work to be done or resource + usage, set this value to an amount assigned for a single + instance of the scaled group. Autoscaler keeps the number of + instances proportional to the value of this metric. The + metric itself does not change value due to group resizing. A + good metric to use with the target is for example + pubsub.googleapis.com/subscription/num_undelivered_messages + or a custom metric exporting the total number of requests + coming to your instances. A bad example would be a metric + exporting an average or median latency, since this value + can't include a chunk assignable to a single instance, it + could be better used with utilization_target instead. + + This field is a member of `oneof`_ ``_single_instance_assignment``. + utilization_target (float): + The target value of the metric that autoscaler maintains. + This must be a positive value. 
A utilization metric scales + number of virtual machines handling requests to increase or + decrease proportionally to the metric. For example, a good + metric to use as a utilization_target is + https://www.googleapis.com/compute/v1/instance/network/received_bytes_count. + The autoscaler works to keep this value constant for each of + the instances. + + This field is a member of `oneof`_ ``_utilization_target``. + utilization_target_type (str): + Defines how target utilization value is expressed for a + Stackdriver Monitoring metric. Either GAUGE, + DELTA_PER_SECOND, or DELTA_PER_MINUTE. Check the + UtilizationTargetType enum for the list of possible values. + + This field is a member of `oneof`_ ``_utilization_target_type``. + """ + class UtilizationTargetType(proto.Enum): + r"""Defines how target utilization value is expressed for a Stackdriver + Monitoring metric. Either GAUGE, DELTA_PER_SECOND, or + DELTA_PER_MINUTE. + + Values: + UNDEFINED_UTILIZATION_TARGET_TYPE (0): + A value indicating that the enum field is not + set. + DELTA_PER_MINUTE (87432861): + Sets the utilization target value for a + cumulative or delta metric, expressed as the + rate of growth per minute. + DELTA_PER_SECOND (255180029): + Sets the utilization target value for a + cumulative or delta metric, expressed as the + rate of growth per second. + GAUGE (67590361): + Sets the utilization target value for a gauge + metric. The autoscaler will collect the average + utilization of the virtual machines from the + last couple of minutes, and compare the value to + the utilization target value to perform + autoscaling. 
+ """ + UNDEFINED_UTILIZATION_TARGET_TYPE = 0 + DELTA_PER_MINUTE = 87432861 + DELTA_PER_SECOND = 255180029 + GAUGE = 67590361 + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + metric: str = proto.Field( + proto.STRING, + number=533067184, + optional=True, + ) + single_instance_assignment: float = proto.Field( + proto.DOUBLE, + number=504768064, + optional=True, + ) + utilization_target: float = proto.Field( + proto.DOUBLE, + number=215905870, + optional=True, + ) + utilization_target_type: str = proto.Field( + proto.STRING, + number=340169355, + optional=True, + ) + + +class AutoscalingPolicyLoadBalancingUtilization(proto.Message): + r"""Configuration parameters of autoscaling based on load + balancing. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + utilization_target (float): + Fraction of backend capacity utilization (set + in HTTP(S) load balancing configuration) that + the autoscaler maintains. Must be a positive + float value. If not defined, the default is 0.8. + + This field is a member of `oneof`_ ``_utilization_target``. + """ + + utilization_target: float = proto.Field( + proto.DOUBLE, + number=215905870, + optional=True, + ) + + +class AutoscalingPolicyScaleInControl(proto.Message): + r"""Configuration that allows for slower scale in so that even if + Autoscaler recommends an abrupt scale in of a MIG, it will be + throttled as specified by the parameters below. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_scaled_in_replicas (google.cloud.compute_v1.types.FixedOrPercent): + Maximum allowed number (or %) of VMs that can + be deducted from the peak recommendation during + the window autoscaler looks at when computing + recommendations. 
Possibly all these VMs can be + deleted at once so user service needs to be + prepared to lose that many VMs in one step. + + This field is a member of `oneof`_ ``_max_scaled_in_replicas``. + time_window_sec (int): + How far back autoscaling looks when computing + recommendations to include directives regarding + slower scale in, as described above. + + This field is a member of `oneof`_ ``_time_window_sec``. + """ + + max_scaled_in_replicas: 'FixedOrPercent' = proto.Field( + proto.MESSAGE, + number=180710123, + optional=True, + message='FixedOrPercent', + ) + time_window_sec: int = proto.Field( + proto.INT32, + number=36405300, + optional=True, + ) + + +class AutoscalingPolicyScalingSchedule(proto.Message): + r"""Scaling based on user-defined schedule. The message describes + a single scaling schedule. A scaling schedule changes the + minimum number of VM instances an autoscaler can recommend, + which can trigger scaling out. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + A description of a scaling schedule. + + This field is a member of `oneof`_ ``_description``. + disabled (bool): + A boolean value that specifies whether a + scaling schedule can influence autoscaler + recommendations. If set to true, then a scaling + schedule has no effect. This field is optional, + and its value is false by default. + + This field is a member of `oneof`_ ``_disabled``. + duration_sec (int): + The duration of time intervals, in seconds, + for which this scaling schedule is to run. The + minimum allowed value is 300. This field is + required. + + This field is a member of `oneof`_ ``_duration_sec``. + min_required_replicas (int): + The minimum number of VM instances that the + autoscaler will recommend in time intervals + starting according to schedule. This field is + required. + + This field is a member of `oneof`_ ``_min_required_replicas``. 
+ schedule (str): + The start timestamps of time intervals when this scaling + schedule is to provide a scaling signal. This field uses the + extended cron format (with an optional year field). The + expression can describe a single timestamp if the optional + year is set, in which case the scaling schedule runs once. + The schedule is interpreted with respect to time_zone. This + field is required. Note: These timestamps only describe when + autoscaler starts providing the scaling signal. The VMs need + additional time to become serving. + + This field is a member of `oneof`_ ``_schedule``. + time_zone (str): + The time zone to use when interpreting the schedule. The + value of this field must be a time zone name from the tz + database: http://en.wikipedia.org/wiki/Tz_database. This + field is assigned a default value of “UTC” if left empty. + + This field is a member of `oneof`_ ``_time_zone``. + """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=270940796, + optional=True, + ) + duration_sec: int = proto.Field( + proto.INT32, + number=212356902, + optional=True, + ) + min_required_replicas: int = proto.Field( + proto.INT32, + number=365514414, + optional=True, + ) + schedule: str = proto.Field( + proto.STRING, + number=375820951, + optional=True, + ) + time_zone: str = proto.Field( + proto.STRING, + number=36848094, + optional=True, + ) + + +class Backend(proto.Message): + r"""Message containing information of one individual backend. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + balancing_mode (str): + Specifies how to determine whether the + backend of a load balancer can handle additional + traffic or is fully loaded. For usage + guidelines, see Connection balancing mode. + Backends must use compatible balancing modes. 
+ For more information, see Supported balancing + modes and target capacity settings and + Restrictions and guidance for instance groups. + Note: Currently, if you use the API to configure + incompatible balancing modes, the configuration + might be accepted even though it has no impact + and is ignored. Specifically, + Backend.maxUtilization is ignored when + Backend.balancingMode is RATE. In the future, + this incompatible combination will be rejected. + Check the BalancingMode enum for the list of + possible values. + + This field is a member of `oneof`_ ``_balancing_mode``. + capacity_scaler (float): + A multiplier applied to the backend's target capacity of its + balancing mode. The default value is 1, which means the + group serves up to 100% of its configured capacity + (depending on balancingMode). A setting of 0 means the group + is completely drained, offering 0% of its available + capacity. The valid ranges are 0.0 and [0.1,1.0]. You cannot + configure a setting larger than 0 and smaller than 0.1. You + cannot configure a setting of 0 when there is only one + backend attached to the backend service. Not available with + backends that don't support using a balancingMode. This + includes backends such as global internet NEGs, regional + serverless NEGs, and PSC NEGs. + + This field is a member of `oneof`_ ``_capacity_scaler``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + failover (bool): + This field designates whether this is a + failover backend. More than one failover backend + can be configured for a given BackendService. + + This field is a member of `oneof`_ ``_failover``. + group (str): + The fully-qualified URL of an instance group or network + endpoint group (NEG) resource. To determine what types of + backends a load balancer supports, see the `Backend services + overview `__. 
+ You must use the *fully-qualified* URL (starting with + https://www.googleapis.com/) to specify the instance group + or NEG. Partial URLs are not supported. + + This field is a member of `oneof`_ ``_group``. + max_connections (int): + Defines a target maximum number of + simultaneous connections. For usage guidelines, + see Connection balancing mode and Utilization + balancing mode. Not available if the backend's + balancingMode is RATE. + + This field is a member of `oneof`_ ``_max_connections``. + max_connections_per_endpoint (int): + Defines a target maximum number of + simultaneous connections. For usage guidelines, + see Connection balancing mode and Utilization + balancing mode. Not available if the backend's + balancingMode is RATE. + + This field is a member of `oneof`_ ``_max_connections_per_endpoint``. + max_connections_per_instance (int): + Defines a target maximum number of + simultaneous connections. For usage guidelines, + see Connection balancing mode and Utilization + balancing mode. Not available if the backend's + balancingMode is RATE. + + This field is a member of `oneof`_ ``_max_connections_per_instance``. + max_rate (int): + Defines a maximum number of HTTP requests per + second (RPS). For usage guidelines, see Rate + balancing mode and Utilization balancing mode. + Not available if the backend's balancingMode is + CONNECTION. + + This field is a member of `oneof`_ ``_max_rate``. + max_rate_per_endpoint (float): + Defines a maximum target for requests per + second (RPS). For usage guidelines, see Rate + balancing mode and Utilization balancing mode. + Not available if the backend's balancingMode is + CONNECTION. + + This field is a member of `oneof`_ ``_max_rate_per_endpoint``. + max_rate_per_instance (float): + Defines a maximum target for requests per + second (RPS). For usage guidelines, see Rate + balancing mode and Utilization balancing mode. + Not available if the backend's balancingMode is + CONNECTION. 
+ + This field is a member of `oneof`_ ``_max_rate_per_instance``. + max_utilization (float): + Optional parameter to define a target capacity for the + UTILIZATION balancing mode. The valid range is [0.0, 1.0]. + For usage guidelines, see Utilization balancing mode. + + This field is a member of `oneof`_ ``_max_utilization``. + """ + class BalancingMode(proto.Enum): + r"""Specifies how to determine whether the backend of a load + balancer can handle additional traffic or is fully loaded. For + usage guidelines, see Connection balancing mode. Backends must + use compatible balancing modes. For more information, see + Supported balancing modes and target capacity settings and + Restrictions and guidance for instance groups. Note: Currently, + if you use the API to configure incompatible balancing modes, + the configuration might be accepted even though it has no impact + and is ignored. Specifically, Backend.maxUtilization is ignored + when Backend.balancingMode is RATE. In the future, this + incompatible combination will be rejected. + + Values: + UNDEFINED_BALANCING_MODE (0): + A value indicating that the enum field is not + set. + CONNECTION (246311646): + Balance based on the number of simultaneous + connections. + RATE (2508000): + Balance based on requests per second (RPS). + UTILIZATION (157008386): + Balance based on the backend utilization. 
+ """ + UNDEFINED_BALANCING_MODE = 0 + CONNECTION = 246311646 + RATE = 2508000 + UTILIZATION = 157008386 + + balancing_mode: str = proto.Field( + proto.STRING, + number=430286217, + optional=True, + ) + capacity_scaler: float = proto.Field( + proto.FLOAT, + number=315958157, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + failover: bool = proto.Field( + proto.BOOL, + number=138892530, + optional=True, + ) + group: str = proto.Field( + proto.STRING, + number=98629247, + optional=True, + ) + max_connections: int = proto.Field( + proto.INT32, + number=110652154, + optional=True, + ) + max_connections_per_endpoint: int = proto.Field( + proto.INT32, + number=216904604, + optional=True, + ) + max_connections_per_instance: int = proto.Field( + proto.INT32, + number=104671900, + optional=True, + ) + max_rate: int = proto.Field( + proto.INT32, + number=408035035, + optional=True, + ) + max_rate_per_endpoint: float = proto.Field( + proto.FLOAT, + number=129832283, + optional=True, + ) + max_rate_per_instance: float = proto.Field( + proto.FLOAT, + number=17599579, + optional=True, + ) + max_utilization: float = proto.Field( + proto.FLOAT, + number=148192199, + optional=True, + ) + + +class BackendBucket(proto.Message): + r"""Represents a Cloud Storage Bucket resource. This Cloud + Storage bucket resource is referenced by a URL map of a load + balancer. For more information, read Backend Buckets. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bucket_name (str): + Cloud Storage bucket name. + + This field is a member of `oneof`_ ``_bucket_name``. + cdn_policy (google.cloud.compute_v1.types.BackendBucketCdnPolicy): + Cloud CDN configuration for this + BackendBucket. + + This field is a member of `oneof`_ ``_cdn_policy``. 
+ compression_mode (str): + Compress text responses using Brotli or gzip + compression, based on the client's + Accept-Encoding header. Check the + CompressionMode enum for the list of possible + values. + + This field is a member of `oneof`_ ``_compression_mode``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + custom_response_headers (MutableSequence[str]): + Headers that the Application Load Balancer + should add to proxied responses. + description (str): + An optional textual description of the + resource; provided by the client when the + resource is created. + + This field is a member of `oneof`_ ``_description``. + edge_security_policy (str): + [Output Only] The resource URL for the edge security policy + associated with this backend bucket. + + This field is a member of `oneof`_ ``_edge_security_policy``. + enable_cdn (bool): + If true, enable Cloud CDN for this + BackendBucket. + + This field is a member of `oneof`_ ``_enable_cdn``. + id (int): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + Type of the resource. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ """ + class CompressionMode(proto.Enum): + r"""Compress text responses using Brotli or gzip compression, + based on the client's Accept-Encoding header. + + Values: + UNDEFINED_COMPRESSION_MODE (0): + A value indicating that the enum field is not + set. + AUTOMATIC (165298699): + Automatically uses the best compression based + on the Accept-Encoding header sent by the + client. + DISABLED (516696700): + Disables compression. Existing compressed + responses cached by Cloud CDN will not be served + to clients. + """ + UNDEFINED_COMPRESSION_MODE = 0 + AUTOMATIC = 165298699 + DISABLED = 516696700 + + bucket_name: str = proto.Field( + proto.STRING, + number=283610048, + optional=True, + ) + cdn_policy: 'BackendBucketCdnPolicy' = proto.Field( + proto.MESSAGE, + number=213976452, + optional=True, + message='BackendBucketCdnPolicy', + ) + compression_mode: str = proto.Field( + proto.STRING, + number=95520988, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + custom_response_headers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=387539094, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + edge_security_policy: str = proto.Field( + proto.STRING, + number=41036943, + optional=True, + ) + enable_cdn: bool = proto.Field( + proto.BOOL, + number=282942321, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + + +class BackendBucketCdnPolicy(proto.Message): + r"""Message containing Cloud CDN configuration for a backend + bucket. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bypass_cache_on_request_headers (MutableSequence[google.cloud.compute_v1.types.BackendBucketCdnPolicyBypassCacheOnRequestHeader]): + Bypass the cache when the specified request + headers are matched - e.g. Pragma or + Authorization headers. Up to 5 headers can be + specified. The cache is bypassed for all + cdnPolicy.cacheMode settings. + cache_key_policy (google.cloud.compute_v1.types.BackendBucketCdnPolicyCacheKeyPolicy): + The CacheKeyPolicy for this CdnPolicy. + + This field is a member of `oneof`_ ``_cache_key_policy``. + cache_mode (str): + Specifies the cache setting for all responses from this + backend. The possible values are: USE_ORIGIN_HEADERS + Requires the origin to set valid caching headers to cache + content. Responses without these headers will not be cached + at Google's edge, and will require a full trip to the origin + on every request, potentially impacting performance and + increasing load on the origin server. FORCE_CACHE_ALL Cache + all content, ignoring any "private", "no-store" or + "no-cache" directives in Cache-Control response headers. + Warning: this may result in Cloud CDN caching private, + per-user (user identifiable) content. CACHE_ALL_STATIC + Automatically cache static content, including common image + formats, media (video and audio), and web assets (JavaScript + and CSS). Requests and responses that are marked as + uncacheable, as well as dynamic content (including HTML), + will not be cached. Check the CacheMode enum for the list of + possible values. + + This field is a member of `oneof`_ ``_cache_mode``. + client_ttl (int): + Specifies a separate client (e.g. browser client) maximum + TTL. This is used to clamp the max-age (or Expires) value + sent to the client. With FORCE_CACHE_ALL, the lesser of + client_ttl and default_ttl is used for the response max-age + directive, along with a "public" directive. 
For cacheable + content in CACHE_ALL_STATIC mode, client_ttl clamps the + max-age from the origin (if specified), or else sets the + response max-age directive to the lesser of the client_ttl + and default_ttl, and also ensures a "public" cache-control + directive is present. If a client TTL is not specified, a + default value (1 hour) will be used. The maximum allowed + value is 31,622,400s (1 year). + + This field is a member of `oneof`_ ``_client_ttl``. + default_ttl (int): + Specifies the default TTL for cached content served by this + origin for responses that do not have an existing valid TTL + (max-age or s-max-age). Setting a TTL of "0" means "always + revalidate". The value of defaultTTL cannot be set to a + value greater than that of maxTTL, but can be equal. When + the cacheMode is set to FORCE_CACHE_ALL, the defaultTTL will + overwrite the TTL set in all responses. The maximum allowed + value is 31,622,400s (1 year), noting that infrequently + accessed objects may be evicted from the cache before the + defined TTL. + + This field is a member of `oneof`_ ``_default_ttl``. + max_ttl (int): + Specifies the maximum allowed TTL for cached + content served by this origin. Cache directives + that attempt to set a max-age or s-maxage higher + than this, or an Expires header more than maxTTL + seconds in the future will be capped at the + value of maxTTL, as if it were the value of an + s-maxage Cache-Control directive. Headers sent + to the client will not be modified. Setting a + TTL of "0" means "always revalidate". The + maximum allowed value is 31,622,400s (1 year), + noting that infrequently accessed objects may be + evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_max_ttl``. + negative_caching (bool): + Negative caching allows per-status code TTLs to be set, in + order to apply fine-grained caching for common errors or + redirects. 
This can reduce the load on your origin and + improve end-user experience by reducing response latency. + When the cache mode is set to CACHE_ALL_STATIC or + USE_ORIGIN_HEADERS, negative caching applies to responses + with the specified response code that lack any + Cache-Control, Expires, or Pragma: no-cache directives. When + the cache mode is set to FORCE_CACHE_ALL, negative caching + applies to all responses with the specified response code, + and override any caching headers. By default, Cloud CDN will + apply the following default TTLs to these status codes: HTTP + 300 (Multiple Choice), 301, 308 (Permanent Redirects): 10m + HTTP 404 (Not Found), 410 (Gone), 451 (Unavailable For Legal + Reasons): 120s HTTP 405 (Method Not Found), 421 (Misdirected + Request), 501 (Not Implemented): 60s. These defaults can be + overridden in negative_caching_policy. + + This field is a member of `oneof`_ ``_negative_caching``. + negative_caching_policy (MutableSequence[google.cloud.compute_v1.types.BackendBucketCdnPolicyNegativeCachingPolicy]): + Sets a cache TTL for the specified HTTP status code. + negative_caching must be enabled to configure + negative_caching_policy. Omitting the policy and leaving + negative_caching enabled will use Cloud CDN's default cache + TTLs. Note that when specifying an explicit + negative_caching_policy, you should take care to specify a + cache TTL for all response codes that you wish to cache. + Cloud CDN will not apply any default negative caching when a + policy exists. + request_coalescing (bool): + If true then Cloud CDN will combine multiple + concurrent cache fill requests into a small + number of requests to the origin. + + This field is a member of `oneof`_ ``_request_coalescing``. + serve_while_stale (int): + Serve existing content from the cache (if + available) when revalidating content with the + origin, or when an error is encountered when + refreshing the cache. 
This setting defines the + default "max-stale" duration for any cached + responses that do not specify a max-stale + directive. Stale responses that exceed the TTL + configured here will not be served. The default + limit (max-stale) is 86400s (1 day), which will + allow stale content to be served up to this + limit beyond the max-age (or s-max-age) of a + cached response. The maximum allowed value is + 604800 (1 week). Set this to zero (0) to disable + serve-while-stale. + + This field is a member of `oneof`_ ``_serve_while_stale``. + signed_url_cache_max_age_sec (int): + Maximum number of seconds the response to a signed URL + request will be considered fresh. After this time period, + the response will be revalidated before being served. + Defaults to 1hr (3600s). When serving responses to signed + URL requests, Cloud CDN will internally behave as though all + responses from this backend had a "Cache-Control: public, + max-age=[TTL]" header, regardless of any existing + Cache-Control header. The actual headers served in responses + will not be altered. + + This field is a member of `oneof`_ ``_signed_url_cache_max_age_sec``. + signed_url_key_names (MutableSequence[str]): + [Output Only] Names of the keys for signing request URLs. + """ + class CacheMode(proto.Enum): + r"""Specifies the cache setting for all responses from this backend. The + possible values are: USE_ORIGIN_HEADERS Requires the origin to set + valid caching headers to cache content. Responses without these + headers will not be cached at Google's edge, and will require a full + trip to the origin on every request, potentially impacting + performance and increasing load on the origin server. + FORCE_CACHE_ALL Cache all content, ignoring any "private", + "no-store" or "no-cache" directives in Cache-Control response + headers. Warning: this may result in Cloud CDN caching private, + per-user (user identifiable) content. 
CACHE_ALL_STATIC Automatically + cache static content, including common image formats, media (video + and audio), and web assets (JavaScript and CSS). Requests and + responses that are marked as uncacheable, as well as dynamic content + (including HTML), will not be cached. + + Values: + UNDEFINED_CACHE_MODE (0): + A value indicating that the enum field is not + set. + CACHE_ALL_STATIC (355027945): + Automatically cache static content, including + common image formats, media (video and audio), + and web assets (JavaScript and CSS). Requests + and responses that are marked as uncacheable, as + well as dynamic content (including HTML), will + not be cached. + FORCE_CACHE_ALL (486026928): + Cache all content, ignoring any "private", + "no-store" or "no-cache" directives in + Cache-Control response headers. Warning: this + may result in Cloud CDN caching private, + per-user (user identifiable) content. + INVALID_CACHE_MODE (381295560): + No description available. + USE_ORIGIN_HEADERS (55380261): + Requires the origin to set valid caching + headers to cache content. Responses without + these headers will not be cached at Google's + edge, and will require a full trip to the origin + on every request, potentially impacting + performance and increasing load on the origin + server. 
+ """ + UNDEFINED_CACHE_MODE = 0 + CACHE_ALL_STATIC = 355027945 + FORCE_CACHE_ALL = 486026928 + INVALID_CACHE_MODE = 381295560 + USE_ORIGIN_HEADERS = 55380261 + + bypass_cache_on_request_headers: MutableSequence['BackendBucketCdnPolicyBypassCacheOnRequestHeader'] = proto.RepeatedField( + proto.MESSAGE, + number=486203082, + message='BackendBucketCdnPolicyBypassCacheOnRequestHeader', + ) + cache_key_policy: 'BackendBucketCdnPolicyCacheKeyPolicy' = proto.Field( + proto.MESSAGE, + number=159263727, + optional=True, + message='BackendBucketCdnPolicyCacheKeyPolicy', + ) + cache_mode: str = proto.Field( + proto.STRING, + number=28877888, + optional=True, + ) + client_ttl: int = proto.Field( + proto.INT32, + number=29034360, + optional=True, + ) + default_ttl: int = proto.Field( + proto.INT32, + number=100253422, + optional=True, + ) + max_ttl: int = proto.Field( + proto.INT32, + number=307578001, + optional=True, + ) + negative_caching: bool = proto.Field( + proto.BOOL, + number=336110005, + optional=True, + ) + negative_caching_policy: MutableSequence['BackendBucketCdnPolicyNegativeCachingPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=155359996, + message='BackendBucketCdnPolicyNegativeCachingPolicy', + ) + request_coalescing: bool = proto.Field( + proto.BOOL, + number=532808276, + optional=True, + ) + serve_while_stale: int = proto.Field( + proto.INT32, + number=236682203, + optional=True, + ) + signed_url_cache_max_age_sec: int = proto.Field( + proto.INT64, + number=269374534, + optional=True, + ) + signed_url_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=371848885, + ) + + +class BackendBucketCdnPolicyBypassCacheOnRequestHeader(proto.Message): + r"""Bypass the cache when the specified request headers are present, + e.g. Pragma or Authorization headers. Values are case insensitive. + The presence of such a header overrides the cache_mode setting. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + header_name (str): + The header field name to match on when + bypassing cache. Values are case-insensitive. + + This field is a member of `oneof`_ ``_header_name``. + """ + + header_name: str = proto.Field( + proto.STRING, + number=110223613, + optional=True, + ) + + +class BackendBucketCdnPolicyCacheKeyPolicy(proto.Message): + r"""Message containing what to include in the cache key for a + request for Cloud CDN. + + Attributes: + include_http_headers (MutableSequence[str]): + Allows HTTP request headers (by name) to be + used in the cache key. + query_string_whitelist (MutableSequence[str]): + Names of query string parameters to include + in cache keys. Default parameters are always + included. '&' and '=' will be percent encoded + and not treated as delimiters. + """ + + include_http_headers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2489606, + ) + query_string_whitelist: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=52456496, + ) + + +class BackendBucketCdnPolicyNegativeCachingPolicy(proto.Message): + r"""Specify CDN TTLs for response error codes. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (int): + The HTTP status code to define a TTL against. + Only HTTP status codes 300, 301, 302, 307, 308, + 404, 405, 410, 421, 451 and 501 are can be + specified as values, and you cannot specify a + status code more than once. + + This field is a member of `oneof`_ ``_code``. + ttl (int): + The TTL (in seconds) for which to cache + responses with the corresponding status code. + The maximum allowed value is 1800s (30 minutes), + noting that infrequently accessed objects may be + evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_ttl``. 
+ """ + + code: int = proto.Field( + proto.INT32, + number=3059181, + optional=True, + ) + ttl: int = proto.Field( + proto.INT32, + number=115180, + optional=True, + ) + + +class BackendBucketList(proto.Message): + r"""Contains a list of BackendBucket resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.BackendBucket]): + A list of BackendBucket resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['BackendBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='BackendBucket', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class BackendService(proto.Message): + r"""Represents a Backend Service resource. A backend service defines how + Google Cloud load balancers distribute traffic. The backend service + configuration contains a set of values, such as the protocol used to + connect to backends, various distribution and session settings, + health checks, and timeouts. These settings provide fine-grained + control over how your load balancer behaves. Most of the settings + have default values that allow for easy configuration if you need to + get started quickly. Backend services in Google Compute Engine can + be either regionally or globally scoped. \* + `Global `__ + \* + `Regional `__ + For more information, see Backend Services. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + affinity_cookie_ttl_sec (int): + Lifetime of cookies in seconds. This setting is applicable + to external and internal HTTP(S) load balancers and Traffic + Director and requires GENERATED_COOKIE or HTTP_COOKIE + session affinity. If set to 0, the cookie is non-persistent + and lasts only until the end of the browser session (or + equivalent). The maximum allowed value is two weeks + (1,209,600). 
Not supported when the backend service is + referenced by a URL map that is bound to target gRPC proxy + that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_affinity_cookie_ttl_sec``. + backends (MutableSequence[google.cloud.compute_v1.types.Backend]): + The list of backends that serve this + BackendService. + cdn_policy (google.cloud.compute_v1.types.BackendServiceCdnPolicy): + Cloud CDN configuration for this + BackendService. Only available for specified + load balancer types. + + This field is a member of `oneof`_ ``_cdn_policy``. + circuit_breakers (google.cloud.compute_v1.types.CircuitBreakers): + + This field is a member of `oneof`_ ``_circuit_breakers``. + compression_mode (str): + Compress text responses using Brotli or gzip + compression, based on the client's + Accept-Encoding header. Check the + CompressionMode enum for the list of possible + values. + + This field is a member of `oneof`_ ``_compression_mode``. + connection_draining (google.cloud.compute_v1.types.ConnectionDraining): + + This field is a member of `oneof`_ ``_connection_draining``. + connection_tracking_policy (google.cloud.compute_v1.types.BackendServiceConnectionTrackingPolicy): + Connection Tracking configuration for this + BackendService. Connection tracking policy + settings are only available for Network Load + Balancing and Internal TCP/UDP Load Balancing. + + This field is a member of `oneof`_ ``_connection_tracking_policy``. + consistent_hash (google.cloud.compute_v1.types.ConsistentHashLoadBalancerSettings): + Consistent Hash-based load balancing can be used to provide + soft session affinity based on HTTP headers, cookies or + other properties. This load balancing policy is applicable + only for HTTP connections. The affinity to a particular + destination host will be lost when one or more hosts are + added/removed from the destination service. This field + specifies parameters that control consistent hashing. 
This + field is only applicable when localityLbPolicy is set to + MAGLEV or RING_HASH. This field is applicable to either: - A + regional backend service with the service_protocol set to + HTTP, HTTPS, or HTTP2, and load_balancing_scheme set to + INTERNAL_MANAGED. - A global backend service with the + load_balancing_scheme set to INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_consistent_hash``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + custom_request_headers (MutableSequence[str]): + Headers that the load balancer adds to proxied requests. See + `Creating custom + headers `__. + custom_response_headers (MutableSequence[str]): + Headers that the load balancer adds to proxied responses. + See `Creating custom + headers `__. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + edge_security_policy (str): + [Output Only] The resource URL for the edge security policy + associated with this backend service. + + This field is a member of `oneof`_ ``_edge_security_policy``. + enable_c_d_n (bool): + If true, enables Cloud CDN for the backend + service of an external HTTP(S) load balancer. + + This field is a member of `oneof`_ ``_enable_c_d_n``. + failover_policy (google.cloud.compute_v1.types.BackendServiceFailoverPolicy): + Requires at least one backend instance group to be defined + as a backup (failover) backend. For load balancers that have + configurable failover: `Internal TCP/UDP Load + Balancing `__ + and `external TCP/UDP Load + Balancing `__. + + This field is a member of `oneof`_ ``_failover_policy``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a BackendService. 
An + up-to-date fingerprint must be provided in order + to update the BackendService, otherwise the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve a + BackendService. + + This field is a member of `oneof`_ ``_fingerprint``. + health_checks (MutableSequence[str]): + The list of URLs to the healthChecks, + httpHealthChecks (legacy), or httpsHealthChecks + (legacy) resource for health checking this + backend service. Not all backend services + support legacy health checks. See Load balancer + guide. Currently, at most one health check can + be specified for each backend service. Backend + services with instance group or zonal NEG + backends must have a health check. Backend + services with internet or serverless NEG + backends must not have a health check. + iap (google.cloud.compute_v1.types.BackendServiceIAP): + The configurations for Identity-Aware Proxy + on this resource. Not available for Internal + TCP/UDP Load Balancing and Network Load + Balancing. + + This field is a member of `oneof`_ ``_iap``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of resource. Always + compute#backendService for backend services. + + This field is a member of `oneof`_ ``_kind``. + load_balancing_scheme (str): + Specifies the load balancer type. A backend + service created for one type of load balancer + cannot be used with another. For more + information, refer to Choosing a load balancer. + Check the LoadBalancingScheme enum for the list + of possible values. + + This field is a member of `oneof`_ ``_load_balancing_scheme``. + locality_lb_policies (MutableSequence[google.cloud.compute_v1.types.BackendServiceLocalityLoadBalancingPolicyConfig]): + A list of locality load-balancing policies to be used in + order of preference. 
When you use localityLbPolicies, you + must set at least one value for either the + localityLbPolicies[].policy or the + localityLbPolicies[].customPolicy field. localityLbPolicies + overrides any value set in the localityLbPolicy field. For + an example of how to use this field, see Define a list of + preferred policies. Caution: This field and its children are + intended for use in a service mesh that includes gRPC + clients only. Envoy proxies can't use backend services that + have this configuration. + locality_lb_policy (str): + The load balancing algorithm used within the scope of the + locality. The possible values are: - ROUND_ROBIN: This is a + simple policy in which each healthy backend is selected in + round robin order. This is the default. - LEAST_REQUEST: An + O(1) algorithm which selects two random healthy hosts and + picks the host which has fewer active requests. - RING_HASH: + The ring/modulo hash load balancer implements consistent + hashing to backends. The algorithm has the property that the + addition/removal of a host from a set of N hosts only + affects 1/N of the requests. - RANDOM: The load balancer + selects a random healthy host. - ORIGINAL_DESTINATION: + Backend host is selected based on the client connection + metadata, i.e., connections are opened to the same address + as the destination address of the incoming connection before + the connection was redirected to the load balancer. - + MAGLEV: used as a drop in replacement for the ring hash load + balancer. Maglev is not as stable as ring hash but has + faster table lookup build times and host selection times. + For more information about Maglev, see + https://ai.google/research/pubs/pub44824 This field is + applicable to either: - A regional backend service with the + service_protocol set to HTTP, HTTPS, or HTTP2, and + load_balancing_scheme set to INTERNAL_MANAGED. - A global + backend service with the load_balancing_scheme set to + INTERNAL_SELF_MANAGED. 
If sessionAffinity is not NONE, and + this field is not set to MAGLEV or RING_HASH, session + affinity settings will not take effect. Only ROUND_ROBIN and + RING_HASH are supported when the backend service is + referenced by a URL map that is bound to target gRPC proxy + that has validateForProxyless field set to true. Check the + LocalityLbPolicy enum for the list of possible values. + + This field is a member of `oneof`_ ``_locality_lb_policy``. + log_config (google.cloud.compute_v1.types.BackendServiceLogConfig): + This field denotes the logging options for + the load balancer traffic served by this backend + service. If logging is enabled, logs will be + exported to Stackdriver. + + This field is a member of `oneof`_ ``_log_config``. + max_stream_duration (google.cloud.compute_v1.types.Duration): + Specifies the default maximum duration (timeout) for streams + to this service. Duration is computed from the beginning of + the stream until the response has been completely processed, + including all retries. A stream that does not complete in + this duration is closed. If not specified, there will be no + timeout limit, i.e. the maximum duration is infinite. This + value can be overridden in the PathMatcher configuration of + the UrlMap that references this backend service. This field + is only allowed when the loadBalancingScheme of the backend + service is INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_max_stream_duration``. + metadatas (MutableMapping[str, str]): + Deployment metadata associated with the + resource to be set by a GKE hub controller and + read by the backend RCTH + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network (str): + The URL of the network to which this backend + service belongs. This field can only be + specified when the load balancing scheme is set + to INTERNAL. + + This field is a member of `oneof`_ ``_network``. + outlier_detection (google.cloud.compute_v1.types.OutlierDetection): + Settings controlling the ejection of unhealthy backend + endpoints from the load balancing pool of each individual + proxy instance that processes the traffic for the given + backend service. If not set, this feature is considered + disabled. Results of the outlier detection algorithm + (ejection of endpoints from the load balancing pool and + returning them back to the pool) are executed independently + by each proxy instance of the load balancer. In most cases, + more than one proxy instance handles the traffic received by + a backend service. Thus, it is possible that an unhealthy + endpoint is detected and ejected by only some of the + proxies, and while this happens, other proxies may continue + to send requests to the same unhealthy endpoint until they + detect and eject the unhealthy endpoint. 
Applicable backend + endpoints can be: - VM instances in an Instance Group - + Endpoints in a Zonal NEG (GCE_VM_IP, GCE_VM_IP_PORT) - + Endpoints in a Hybrid Connectivity NEG + (NON_GCP_PRIVATE_IP_PORT) - Serverless NEGs, that resolve to + Cloud Run, App Engine, or Cloud Functions Services - Private + Service Connect NEGs, that resolve to Google-managed + regional API endpoints or managed services published using + Private Service Connect Applicable backend service types can + be: - A global backend service with the loadBalancingScheme + set to INTERNAL_SELF_MANAGED or EXTERNAL_MANAGED. - A + regional backend service with the serviceProtocol set to + HTTP, HTTPS, or HTTP2, and loadBalancingScheme set to + INTERNAL_MANAGED or EXTERNAL_MANAGED. Not supported for + Serverless NEGs. Not supported when the backend service is + referenced by a URL map that is bound to target gRPC proxy + that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_outlier_detection``. + port (int): + Deprecated in favor of portName. The TCP port + to connect on the backend. The default value is + 80. For Internal TCP/UDP Load Balancing and + Network Load Balancing, omit port. + + This field is a member of `oneof`_ ``_port``. + port_name (str): + A named port on a backend instance group representing the + port for communication to the backend VMs in that group. The + named port must be `defined on each backend instance + group `__. + This parameter has no meaning if the backends are NEGs. For + Internal TCP/UDP Load Balancing and Network Load Balancing, + omit port_name. + + This field is a member of `oneof`_ ``_port_name``. + protocol (str): + The protocol this BackendService uses to + communicate with backends. Possible values are + HTTP, HTTPS, HTTP2, TCP, SSL, UDP or GRPC. + depending on the chosen load balancer or Traffic + Director configuration. Refer to the + documentation for the load balancers or for + Traffic Director for more information. 
Must be + set to GRPC when the backend service is + referenced by a URL map that is bound to target + gRPC proxy. Check the Protocol enum for the list + of possible values. + + This field is a member of `oneof`_ ``_protocol``. + region (str): + [Output Only] URL of the region where the regional backend + service resides. This field is not applicable to global + backend services. You must specify this field as part of the + HTTP request URL. It is not settable as a field in the + request body. + + This field is a member of `oneof`_ ``_region``. + security_policy (str): + [Output Only] The resource URL for the security policy + associated with this backend service. + + This field is a member of `oneof`_ ``_security_policy``. + security_settings (google.cloud.compute_v1.types.SecuritySettings): + This field specifies the security settings that apply to + this backend service. This field is applicable to a global + backend service with the load_balancing_scheme set to + INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_security_settings``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + service_bindings (MutableSequence[str]): + URLs of networkservices.ServiceBinding resources. Can only + be set if load balancing scheme is INTERNAL_SELF_MANAGED. If + set, lists of backends and health checks must be both empty. + session_affinity (str): + Type of session affinity to use. The default is NONE. Only + NONE and HEADER_FIELD are supported when the backend service + is referenced by a URL map that is bound to target gRPC + proxy that has validateForProxyless field set to true. For + more details, see: `Session + Affinity `__. + Check the SessionAffinity enum for the list of possible + values. + + This field is a member of `oneof`_ ``_session_affinity``. + subsetting (google.cloud.compute_v1.types.Subsetting): + + This field is a member of `oneof`_ ``_subsetting``. 
+ timeout_sec (int): + The backend service timeout has a different + meaning depending on the type of load balancer. + For more information see, Backend service + settings. The default is 30 seconds. The full + range of timeout values allowed goes from 1 + through 2,147,483,647 seconds. This value can be + overridden in the PathMatcher configuration of + the UrlMap that references this backend service. + Not supported when the backend service is + referenced by a URL map that is bound to target + gRPC proxy that has validateForProxyless field + set to true. Instead, use maxStreamDuration. + + This field is a member of `oneof`_ ``_timeout_sec``. + """ + class CompressionMode(proto.Enum): + r"""Compress text responses using Brotli or gzip compression, + based on the client's Accept-Encoding header. + + Values: + UNDEFINED_COMPRESSION_MODE (0): + A value indicating that the enum field is not + set. + AUTOMATIC (165298699): + Automatically uses the best compression based + on the Accept-Encoding header sent by the + client. + DISABLED (516696700): + Disables compression. Existing compressed + responses cached by Cloud CDN will not be served + to clients. + """ + UNDEFINED_COMPRESSION_MODE = 0 + AUTOMATIC = 165298699 + DISABLED = 516696700 + + class LoadBalancingScheme(proto.Enum): + r"""Specifies the load balancer type. A backend service created + for one type of load balancer cannot be used with another. For + more information, refer to Choosing a load balancer. + + Values: + UNDEFINED_LOAD_BALANCING_SCHEME (0): + A value indicating that the enum field is not + set. + EXTERNAL (35607499): + Signifies that this will be used for external + HTTP(S), SSL Proxy, TCP Proxy, or Network Load + Balancing + EXTERNAL_MANAGED (512006923): + Signifies that this will be used for External + Managed HTTP(S) Load Balancing. + INTERNAL (279295677): + Signifies that this will be used for Internal + TCP/UDP Load Balancing. 
+ INTERNAL_MANAGED (37350397): + Signifies that this will be used for Internal + HTTP(S) Load Balancing. + INTERNAL_SELF_MANAGED (236211150): + Signifies that this will be used by Traffic + Director. + INVALID_LOAD_BALANCING_SCHEME (275352060): + No description available. + """ + UNDEFINED_LOAD_BALANCING_SCHEME = 0 + EXTERNAL = 35607499 + EXTERNAL_MANAGED = 512006923 + INTERNAL = 279295677 + INTERNAL_MANAGED = 37350397 + INTERNAL_SELF_MANAGED = 236211150 + INVALID_LOAD_BALANCING_SCHEME = 275352060 + + class LocalityLbPolicy(proto.Enum): + r"""The load balancing algorithm used within the scope of the locality. + The possible values are: - ROUND_ROBIN: This is a simple policy in + which each healthy backend is selected in round robin order. This is + the default. - LEAST_REQUEST: An O(1) algorithm which selects two + random healthy hosts and picks the host which has fewer active + requests. - RING_HASH: The ring/modulo hash load balancer implements + consistent hashing to backends. The algorithm has the property that + the addition/removal of a host from a set of N hosts only affects + 1/N of the requests. - RANDOM: The load balancer selects a random + healthy host. - ORIGINAL_DESTINATION: Backend host is selected based + on the client connection metadata, i.e., connections are opened to + the same address as the destination address of the incoming + connection before the connection was redirected to the load + balancer. - MAGLEV: used as a drop in replacement for the ring hash + load balancer. Maglev is not as stable as ring hash but has faster + table lookup build times and host selection times. For more + information about Maglev, see + https://ai.google/research/pubs/pub44824 This field is applicable to + either: - A regional backend service with the service_protocol set + to HTTP, HTTPS, or HTTP2, and load_balancing_scheme set to + INTERNAL_MANAGED. - A global backend service with the + load_balancing_scheme set to INTERNAL_SELF_MANAGED. 
If + sessionAffinity is not NONE, and this field is not set to MAGLEV or + RING_HASH, session affinity settings will not take effect. Only + ROUND_ROBIN and RING_HASH are supported when the backend service is + referenced by a URL map that is bound to target gRPC proxy that has + validateForProxyless field set to true. + + Values: + UNDEFINED_LOCALITY_LB_POLICY (0): + A value indicating that the enum field is not + set. + INVALID_LB_POLICY (323318707): + No description available. + LEAST_REQUEST (46604921): + An O(1) algorithm which selects two random + healthy hosts and picks the host which has fewer + active requests. + MAGLEV (119180266): + This algorithm implements consistent hashing + to backends. Maglev can be used as a drop in + replacement for the ring hash load balancer. + Maglev is not as stable as ring hash but has + faster table lookup build times and host + selection times. For more information about + Maglev, see + https://ai.google/research/pubs/pub44824 + ORIGINAL_DESTINATION (166297216): + Backend host is selected based on the client + connection metadata, i.e., connections are + opened to the same address as the destination + address of the incoming connection before the + connection was redirected to the load balancer. + RANDOM (262527171): + The load balancer selects a random healthy + host. + RING_HASH (432795069): + The ring/modulo hash load balancer implements + consistent hashing to backends. The algorithm + has the property that the addition/removal of a + host from a set of N hosts only affects 1/N of + the requests. + ROUND_ROBIN (153895801): + This is a simple policy in which each healthy + backend is selected in round robin order. This + is the default. + WEIGHTED_MAGLEV (254930962): + Per-instance weighted Load Balancing via health check + reported weights. 
If set, the Backend Service must configure + a non legacy HTTP-based Health Check, and health check + replies are expected to contain non-standard HTTP response + header field X-Load-Balancing-Endpoint-Weight to specify the + per-instance weights. If set, Load Balancing is weighted + based on the per-instance weights reported in the last + processed health check replies, as long as every instance + either reported a valid weight or had UNAVAILABLE_WEIGHT. + Otherwise, Load Balancing remains equal-weight. This option + is only supported in Network Load Balancing. + """ + UNDEFINED_LOCALITY_LB_POLICY = 0 + INVALID_LB_POLICY = 323318707 + LEAST_REQUEST = 46604921 + MAGLEV = 119180266 + ORIGINAL_DESTINATION = 166297216 + RANDOM = 262527171 + RING_HASH = 432795069 + ROUND_ROBIN = 153895801 + WEIGHTED_MAGLEV = 254930962 + + class Protocol(proto.Enum): + r"""The protocol this BackendService uses to communicate with + backends. Possible values are HTTP, HTTPS, HTTP2, TCP, SSL, UDP + or GRPC. depending on the chosen load balancer or Traffic + Director configuration. Refer to the documentation for the load + balancers or for Traffic Director for more information. Must be + set to GRPC when the backend service is referenced by a URL map + that is bound to target gRPC proxy. + + Values: + UNDEFINED_PROTOCOL (0): + A value indicating that the enum field is not + set. + GRPC (2196510): + gRPC (available for Traffic Director). + HTTP (2228360): + No description available. + HTTP2 (69079210): + HTTP/2 with SSL. + HTTPS (69079243): + No description available. + SSL (82412): + TCP proxying with SSL. + TCP (82881): + TCP proxying or TCP pass-through. + UDP (83873): + UDP. + UNSPECIFIED (526786327): + If a Backend Service has UNSPECIFIED as its + protocol, it can be used with any L3/L4 + Forwarding Rules. 
+ """ + UNDEFINED_PROTOCOL = 0 + GRPC = 2196510 + HTTP = 2228360 + HTTP2 = 69079210 + HTTPS = 69079243 + SSL = 82412 + TCP = 82881 + UDP = 83873 + UNSPECIFIED = 526786327 + + class SessionAffinity(proto.Enum): + r"""Type of session affinity to use. The default is NONE. Only NONE and + HEADER_FIELD are supported when the backend service is referenced by + a URL map that is bound to target gRPC proxy that has + validateForProxyless field set to true. For more details, see: + `Session + Affinity `__. + + Values: + UNDEFINED_SESSION_AFFINITY (0): + A value indicating that the enum field is not + set. + CLIENT_IP (345665051): + 2-tuple hash on packet's source and + destination IP addresses. Connections from the + same source IP address to the same destination + IP address will be served by the same backend VM + while that VM remains healthy. + CLIENT_IP_NO_DESTINATION (106122516): + 1-tuple hash only on packet's source IP + address. Connections from the same source IP + address will be served by the same backend VM + while that VM remains healthy. This option can + only be used for Internal TCP/UDP Load + Balancing. + CLIENT_IP_PORT_PROTO (221722926): + 5-tuple hash on packet's source and + destination IP addresses, IP protocol, and + source and destination ports. Connections for + the same IP protocol from the same source IP + address and port to the same destination IP + address and port will be served by the same + backend VM while that VM remains healthy. This + option cannot be used for HTTP(S) load + balancing. + CLIENT_IP_PROTO (25322148): + 3-tuple hash on packet's source and + destination IP addresses, and IP protocol. + Connections for the same IP protocol from the + same source IP address to the same destination + IP address will be served by the same backend VM + while that VM remains healthy. This option + cannot be used for HTTP(S) load balancing. + GENERATED_COOKIE (370321204): + Hash based on a cookie generated by the L7 + loadbalancer. 
Only valid for HTTP(S) load + balancing. + HEADER_FIELD (200737960): + The hash is based on a user specified header + field. + HTTP_COOKIE (494981627): + The hash is based on a user provided cookie. + NONE (2402104): + No session affinity. Connections from the + same client IP may go to any instance in the + pool. + """ + UNDEFINED_SESSION_AFFINITY = 0 + CLIENT_IP = 345665051 + CLIENT_IP_NO_DESTINATION = 106122516 + CLIENT_IP_PORT_PROTO = 221722926 + CLIENT_IP_PROTO = 25322148 + GENERATED_COOKIE = 370321204 + HEADER_FIELD = 200737960 + HTTP_COOKIE = 494981627 + NONE = 2402104 + + affinity_cookie_ttl_sec: int = proto.Field( + proto.INT32, + number=369996954, + optional=True, + ) + backends: MutableSequence['Backend'] = proto.RepeatedField( + proto.MESSAGE, + number=510839903, + message='Backend', + ) + cdn_policy: 'BackendServiceCdnPolicy' = proto.Field( + proto.MESSAGE, + number=213976452, + optional=True, + message='BackendServiceCdnPolicy', + ) + circuit_breakers: 'CircuitBreakers' = proto.Field( + proto.MESSAGE, + number=421340061, + optional=True, + message='CircuitBreakers', + ) + compression_mode: str = proto.Field( + proto.STRING, + number=95520988, + optional=True, + ) + connection_draining: 'ConnectionDraining' = proto.Field( + proto.MESSAGE, + number=461096747, + optional=True, + message='ConnectionDraining', + ) + connection_tracking_policy: 'BackendServiceConnectionTrackingPolicy' = proto.Field( + proto.MESSAGE, + number=143994969, + optional=True, + message='BackendServiceConnectionTrackingPolicy', + ) + consistent_hash: 'ConsistentHashLoadBalancerSettings' = proto.Field( + proto.MESSAGE, + number=905883, + optional=True, + message='ConsistentHashLoadBalancerSettings', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + custom_request_headers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=27977992, + ) + custom_response_headers: MutableSequence[str] = proto.RepeatedField( + 
proto.STRING, + number=387539094, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + edge_security_policy: str = proto.Field( + proto.STRING, + number=41036943, + optional=True, + ) + enable_c_d_n: bool = proto.Field( + proto.BOOL, + number=250733499, + optional=True, + ) + failover_policy: 'BackendServiceFailoverPolicy' = proto.Field( + proto.MESSAGE, + number=105658655, + optional=True, + message='BackendServiceFailoverPolicy', + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + health_checks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=448370606, + ) + iap: 'BackendServiceIAP' = proto.Field( + proto.MESSAGE, + number=104024, + optional=True, + message='BackendServiceIAP', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + load_balancing_scheme: str = proto.Field( + proto.STRING, + number=363890244, + optional=True, + ) + locality_lb_policies: MutableSequence['BackendServiceLocalityLoadBalancingPolicyConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=140982557, + message='BackendServiceLocalityLoadBalancingPolicyConfig', + ) + locality_lb_policy: str = proto.Field( + proto.STRING, + number=131431487, + optional=True, + ) + log_config: 'BackendServiceLogConfig' = proto.Field( + proto.MESSAGE, + number=351299741, + optional=True, + message='BackendServiceLogConfig', + ) + max_stream_duration: 'Duration' = proto.Field( + proto.MESSAGE, + number=61428376, + optional=True, + message='Duration', + ) + metadatas: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8514340, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + outlier_detection: 'OutlierDetection' = proto.Field( + 
proto.MESSAGE, + number=354625086, + optional=True, + message='OutlierDetection', + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + port_name: str = proto.Field( + proto.STRING, + number=41534345, + optional=True, + ) + protocol: str = proto.Field( + proto.STRING, + number=84577944, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + optional=True, + ) + security_settings: 'SecuritySettings' = proto.Field( + proto.MESSAGE, + number=478649922, + optional=True, + message='SecuritySettings', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + service_bindings: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=133581016, + ) + session_affinity: str = proto.Field( + proto.STRING, + number=463888561, + optional=True, + ) + subsetting: 'Subsetting' = proto.Field( + proto.MESSAGE, + number=450283536, + optional=True, + message='Subsetting', + ) + timeout_sec: int = proto.Field( + proto.INT32, + number=79994995, + optional=True, + ) + + +class BackendServiceAggregatedList(proto.Message): + r"""Contains a list of BackendServicesScopedList. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.BackendServicesScopedList]): + A list of BackendServicesScopedList + resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'BackendServicesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='BackendServicesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class BackendServiceCdnPolicy(proto.Message): + r"""Message containing Cloud CDN configuration for a backend + service. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bypass_cache_on_request_headers (MutableSequence[google.cloud.compute_v1.types.BackendServiceCdnPolicyBypassCacheOnRequestHeader]): + Bypass the cache when the specified request + headers are matched - e.g. Pragma or + Authorization headers. Up to 5 headers can be + specified. 
The cache is bypassed for all + cdnPolicy.cacheMode settings. + cache_key_policy (google.cloud.compute_v1.types.CacheKeyPolicy): + The CacheKeyPolicy for this CdnPolicy. + + This field is a member of `oneof`_ ``_cache_key_policy``. + cache_mode (str): + Specifies the cache setting for all responses from this + backend. The possible values are: USE_ORIGIN_HEADERS + Requires the origin to set valid caching headers to cache + content. Responses without these headers will not be cached + at Google's edge, and will require a full trip to the origin + on every request, potentially impacting performance and + increasing load on the origin server. FORCE_CACHE_ALL Cache + all content, ignoring any "private", "no-store" or + "no-cache" directives in Cache-Control response headers. + Warning: this may result in Cloud CDN caching private, + per-user (user identifiable) content. CACHE_ALL_STATIC + Automatically cache static content, including common image + formats, media (video and audio), and web assets (JavaScript + and CSS). Requests and responses that are marked as + uncacheable, as well as dynamic content (including HTML), + will not be cached. Check the CacheMode enum for the list of + possible values. + + This field is a member of `oneof`_ ``_cache_mode``. + client_ttl (int): + Specifies a separate client (e.g. browser client) maximum + TTL. This is used to clamp the max-age (or Expires) value + sent to the client. With FORCE_CACHE_ALL, the lesser of + client_ttl and default_ttl is used for the response max-age + directive, along with a "public" directive. For cacheable + content in CACHE_ALL_STATIC mode, client_ttl clamps the + max-age from the origin (if specified), or else sets the + response max-age directive to the lesser of the client_ttl + and default_ttl, and also ensures a "public" cache-control + directive is present. If a client TTL is not specified, a + default value (1 hour) will be used. The maximum allowed + value is 31,622,400s (1 year). 
+ + This field is a member of `oneof`_ ``_client_ttl``. + default_ttl (int): + Specifies the default TTL for cached content served by this + origin for responses that do not have an existing valid TTL + (max-age or s-max-age). Setting a TTL of "0" means "always + revalidate". The value of defaultTTL cannot be set to a + value greater than that of maxTTL, but can be equal. When + the cacheMode is set to FORCE_CACHE_ALL, the defaultTTL will + overwrite the TTL set in all responses. The maximum allowed + value is 31,622,400s (1 year), noting that infrequently + accessed objects may be evicted from the cache before the + defined TTL. + + This field is a member of `oneof`_ ``_default_ttl``. + max_ttl (int): + Specifies the maximum allowed TTL for cached + content served by this origin. Cache directives + that attempt to set a max-age or s-maxage higher + than this, or an Expires header more than maxTTL + seconds in the future will be capped at the + value of maxTTL, as if it were the value of an + s-maxage Cache-Control directive. Headers sent + to the client will not be modified. Setting a + TTL of "0" means "always revalidate". The + maximum allowed value is 31,622,400s (1 year), + noting that infrequently accessed objects may be + evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_max_ttl``. + negative_caching (bool): + Negative caching allows per-status code TTLs to be set, in + order to apply fine-grained caching for common errors or + redirects. This can reduce the load on your origin and + improve end-user experience by reducing response latency. + When the cache mode is set to CACHE_ALL_STATIC or + USE_ORIGIN_HEADERS, negative caching applies to responses + with the specified response code that lack any + Cache-Control, Expires, or Pragma: no-cache directives. When + the cache mode is set to FORCE_CACHE_ALL, negative caching + applies to all responses with the specified response code, + and override any caching headers. 
By default, Cloud CDN will + apply the following default TTLs to these status codes: HTTP + 300 (Multiple Choice), 301, 308 (Permanent Redirects): 10m + HTTP 404 (Not Found), 410 (Gone), 451 (Unavailable For Legal + Reasons): 120s HTTP 405 (Method Not Found), 421 (Misdirected + Request), 501 (Not Implemented): 60s. These defaults can be + overridden in negative_caching_policy. + + This field is a member of `oneof`_ ``_negative_caching``. + negative_caching_policy (MutableSequence[google.cloud.compute_v1.types.BackendServiceCdnPolicyNegativeCachingPolicy]): + Sets a cache TTL for the specified HTTP status code. + negative_caching must be enabled to configure + negative_caching_policy. Omitting the policy and leaving + negative_caching enabled will use Cloud CDN's default cache + TTLs. Note that when specifying an explicit + negative_caching_policy, you should take care to specify a + cache TTL for all response codes that you wish to cache. + Cloud CDN will not apply any default negative caching when a + policy exists. + request_coalescing (bool): + If true then Cloud CDN will combine multiple + concurrent cache fill requests into a small + number of requests to the origin. + + This field is a member of `oneof`_ ``_request_coalescing``. + serve_while_stale (int): + Serve existing content from the cache (if + available) when revalidating content with the + origin, or when an error is encountered when + refreshing the cache. This setting defines the + default "max-stale" duration for any cached + responses that do not specify a max-stale + directive. Stale responses that exceed the TTL + configured here will not be served. The default + limit (max-stale) is 86400s (1 day), which will + allow stale content to be served up to this + limit beyond the max-age (or s-max-age) of a + cached response. The maximum allowed value is + 604800 (1 week). Set this to zero (0) to disable + serve-while-stale. + + This field is a member of `oneof`_ ``_serve_while_stale``. 
+ signed_url_cache_max_age_sec (int): + Maximum number of seconds the response to a signed URL + request will be considered fresh. After this time period, + the response will be revalidated before being served. + Defaults to 1hr (3600s). When serving responses to signed + URL requests, Cloud CDN will internally behave as though all + responses from this backend had a "Cache-Control: public, + max-age=[TTL]" header, regardless of any existing + Cache-Control header. The actual headers served in responses + will not be altered. + + This field is a member of `oneof`_ ``_signed_url_cache_max_age_sec``. + signed_url_key_names (MutableSequence[str]): + [Output Only] Names of the keys for signing request URLs. + """ + class CacheMode(proto.Enum): + r"""Specifies the cache setting for all responses from this backend. The + possible values are: USE_ORIGIN_HEADERS Requires the origin to set + valid caching headers to cache content. Responses without these + headers will not be cached at Google's edge, and will require a full + trip to the origin on every request, potentially impacting + performance and increasing load on the origin server. + FORCE_CACHE_ALL Cache all content, ignoring any "private", + "no-store" or "no-cache" directives in Cache-Control response + headers. Warning: this may result in Cloud CDN caching private, + per-user (user identifiable) content. CACHE_ALL_STATIC Automatically + cache static content, including common image formats, media (video + and audio), and web assets (JavaScript and CSS). Requests and + responses that are marked as uncacheable, as well as dynamic content + (including HTML), will not be cached. + + Values: + UNDEFINED_CACHE_MODE (0): + A value indicating that the enum field is not + set. + CACHE_ALL_STATIC (355027945): + Automatically cache static content, including + common image formats, media (video and audio), + and web assets (JavaScript and CSS). 
Requests + and responses that are marked as uncacheable, as + well as dynamic content (including HTML), will + not be cached. + FORCE_CACHE_ALL (486026928): + Cache all content, ignoring any "private", + "no-store" or "no-cache" directives in + Cache-Control response headers. Warning: this + may result in Cloud CDN caching private, + per-user (user identifiable) content. + INVALID_CACHE_MODE (381295560): + No description available. + USE_ORIGIN_HEADERS (55380261): + Requires the origin to set valid caching + headers to cache content. Responses without + these headers will not be cached at Google's + edge, and will require a full trip to the origin + on every request, potentially impacting + performance and increasing load on the origin + server. + """ + UNDEFINED_CACHE_MODE = 0 + CACHE_ALL_STATIC = 355027945 + FORCE_CACHE_ALL = 486026928 + INVALID_CACHE_MODE = 381295560 + USE_ORIGIN_HEADERS = 55380261 + + bypass_cache_on_request_headers: MutableSequence['BackendServiceCdnPolicyBypassCacheOnRequestHeader'] = proto.RepeatedField( + proto.MESSAGE, + number=486203082, + message='BackendServiceCdnPolicyBypassCacheOnRequestHeader', + ) + cache_key_policy: 'CacheKeyPolicy' = proto.Field( + proto.MESSAGE, + number=159263727, + optional=True, + message='CacheKeyPolicy', + ) + cache_mode: str = proto.Field( + proto.STRING, + number=28877888, + optional=True, + ) + client_ttl: int = proto.Field( + proto.INT32, + number=29034360, + optional=True, + ) + default_ttl: int = proto.Field( + proto.INT32, + number=100253422, + optional=True, + ) + max_ttl: int = proto.Field( + proto.INT32, + number=307578001, + optional=True, + ) + negative_caching: bool = proto.Field( + proto.BOOL, + number=336110005, + optional=True, + ) + negative_caching_policy: MutableSequence['BackendServiceCdnPolicyNegativeCachingPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=155359996, + message='BackendServiceCdnPolicyNegativeCachingPolicy', + ) + request_coalescing: bool = proto.Field( + 
proto.BOOL, + number=532808276, + optional=True, + ) + serve_while_stale: int = proto.Field( + proto.INT32, + number=236682203, + optional=True, + ) + signed_url_cache_max_age_sec: int = proto.Field( + proto.INT64, + number=269374534, + optional=True, + ) + signed_url_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=371848885, + ) + + +class BackendServiceCdnPolicyBypassCacheOnRequestHeader(proto.Message): + r"""Bypass the cache when the specified request headers are present, + e.g. Pragma or Authorization headers. Values are case insensitive. + The presence of such a header overrides the cache_mode setting. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + header_name (str): + The header field name to match on when + bypassing cache. Values are case-insensitive. + + This field is a member of `oneof`_ ``_header_name``. + """ + + header_name: str = proto.Field( + proto.STRING, + number=110223613, + optional=True, + ) + + +class BackendServiceCdnPolicyNegativeCachingPolicy(proto.Message): + r"""Specify CDN TTLs for response error codes. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (int): + The HTTP status code to define a TTL against. + Only HTTP status codes 300, 301, 302, 307, 308, + 404, 405, 410, 421, 451 and 501 are can be + specified as values, and you cannot specify a + status code more than once. + + This field is a member of `oneof`_ ``_code``. + ttl (int): + The TTL (in seconds) for which to cache + responses with the corresponding status code. + The maximum allowed value is 1800s (30 minutes), + noting that infrequently accessed objects may be + evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_ttl``. 
+ """ + + code: int = proto.Field( + proto.INT32, + number=3059181, + optional=True, + ) + ttl: int = proto.Field( + proto.INT32, + number=115180, + optional=True, + ) + + +class BackendServiceConnectionTrackingPolicy(proto.Message): + r"""Connection Tracking configuration for this BackendService. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + connection_persistence_on_unhealthy_backends (str): + Specifies connection persistence when backends are + unhealthy. The default value is DEFAULT_FOR_PROTOCOL. If set + to DEFAULT_FOR_PROTOCOL, the existing connections persist on + unhealthy backends only for connection-oriented protocols + (TCP and SCTP) and only if the Tracking Mode is + PER_CONNECTION (default tracking mode) or the Session + Affinity is configured for 5-tuple. They do not persist for + UDP. If set to NEVER_PERSIST, after a backend becomes + unhealthy, the existing connections on the unhealthy backend + are never persisted on the unhealthy backend. They are + always diverted to newly selected healthy backends (unless + all backends are unhealthy). If set to ALWAYS_PERSIST, + existing connections always persist on unhealthy backends + regardless of protocol and session affinity. It is generally + not recommended to use this mode overriding the default. For + more details, see `Connection Persistence for Network Load + Balancing `__ + and `Connection Persistence for Internal TCP/UDP Load + Balancing `__. + Check the ConnectionPersistenceOnUnhealthyBackends enum for + the list of possible values. + + This field is a member of `oneof`_ ``_connection_persistence_on_unhealthy_backends``. + enable_strong_affinity (bool): + Enable Strong Session Affinity for Network + Load Balancing. This option is not available + publicly. + + This field is a member of `oneof`_ ``_enable_strong_affinity``. 
+ idle_timeout_sec (int): + Specifies how long to keep a Connection Tracking entry while + there is no matching traffic (in seconds). For Internal + TCP/UDP Load Balancing: - The minimum (default) is 10 + minutes and the maximum is 16 hours. - It can be set only if + Connection Tracking is less than 5-tuple (i.e. Session + Affinity is CLIENT_IP_NO_DESTINATION, CLIENT_IP or + CLIENT_IP_PROTO, and Tracking Mode is PER_SESSION). For + Network Load Balancer the default is 60 seconds. This option + is not available publicly. + + This field is a member of `oneof`_ ``_idle_timeout_sec``. + tracking_mode (str): + Specifies the key used for connection tracking. There are + two options: - PER_CONNECTION: This is the default mode. The + Connection Tracking is performed as per the Connection Key + (default Hash Method) for the specific protocol. - + PER_SESSION: The Connection Tracking is performed as per the + configured Session Affinity. It matches the configured + Session Affinity. For more details, see `Tracking Mode for + Network Load + Balancing `__ + and `Tracking Mode for Internal TCP/UDP Load + Balancing `__. + Check the TrackingMode enum for the list of possible values. + + This field is a member of `oneof`_ ``_tracking_mode``. + """ + class ConnectionPersistenceOnUnhealthyBackends(proto.Enum): + r"""Specifies connection persistence when backends are unhealthy. The + default value is DEFAULT_FOR_PROTOCOL. If set to + DEFAULT_FOR_PROTOCOL, the existing connections persist on unhealthy + backends only for connection-oriented protocols (TCP and SCTP) and + only if the Tracking Mode is PER_CONNECTION (default tracking mode) + or the Session Affinity is configured for 5-tuple. They do not + persist for UDP. If set to NEVER_PERSIST, after a backend becomes + unhealthy, the existing connections on the unhealthy backend are + never persisted on the unhealthy backend. They are always diverted + to newly selected healthy backends (unless all backends are + unhealthy). 
If set to ALWAYS_PERSIST, existing connections always + persist on unhealthy backends regardless of protocol and session + affinity. It is generally not recommended to use this mode + overriding the default. For more details, see `Connection + Persistence for Network Load + Balancing `__ + and `Connection Persistence for Internal TCP/UDP Load + Balancing `__. + + Values: + UNDEFINED_CONNECTION_PERSISTENCE_ON_UNHEALTHY_BACKENDS (0): + A value indicating that the enum field is not + set. + ALWAYS_PERSIST (38400900): + No description available. + DEFAULT_FOR_PROTOCOL (145265356): + No description available. + NEVER_PERSIST (138646241): + No description available. + """ + UNDEFINED_CONNECTION_PERSISTENCE_ON_UNHEALTHY_BACKENDS = 0 + ALWAYS_PERSIST = 38400900 + DEFAULT_FOR_PROTOCOL = 145265356 + NEVER_PERSIST = 138646241 + + class TrackingMode(proto.Enum): + r"""Specifies the key used for connection tracking. There are two + options: - PER_CONNECTION: This is the default mode. The Connection + Tracking is performed as per the Connection Key (default Hash + Method) for the specific protocol. - PER_SESSION: The Connection + Tracking is performed as per the configured Session Affinity. It + matches the configured Session Affinity. For more details, see + `Tracking Mode for Network Load + Balancing `__ + and `Tracking Mode for Internal TCP/UDP Load + Balancing `__. + + Values: + UNDEFINED_TRACKING_MODE (0): + A value indicating that the enum field is not + set. + INVALID_TRACKING_MODE (49234371): + No description available. + PER_CONNECTION (85162848): + No description available. + PER_SESSION (182099252): + No description available. 
+ """ + UNDEFINED_TRACKING_MODE = 0 + INVALID_TRACKING_MODE = 49234371 + PER_CONNECTION = 85162848 + PER_SESSION = 182099252 + + connection_persistence_on_unhealthy_backends: str = proto.Field( + proto.STRING, + number=152439033, + optional=True, + ) + enable_strong_affinity: bool = proto.Field( + proto.BOOL, + number=24539924, + optional=True, + ) + idle_timeout_sec: int = proto.Field( + proto.INT32, + number=24977544, + optional=True, + ) + tracking_mode: str = proto.Field( + proto.STRING, + number=127757867, + optional=True, + ) + + +class BackendServiceFailoverPolicy(proto.Message): + r"""For load balancers that have configurable failover: `Internal + TCP/UDP Load + Balancing `__ + and `external TCP/UDP Load + Balancing `__. + On failover or failback, this field indicates whether connection + draining will be honored. Google Cloud has a fixed connection + draining timeout of 10 minutes. A setting of true terminates + existing TCP connections to the active pool during failover and + failback, immediately draining traffic. A setting of false allows + existing TCP connections to persist, even on VMs no longer in the + active pool, for up to the duration of the connection draining + timeout (10 minutes). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disable_connection_drain_on_failover (bool): + This can be set to true only if the protocol + is TCP. The default is false. + + This field is a member of `oneof`_ ``_disable_connection_drain_on_failover``. + drop_traffic_if_unhealthy (bool): + If set to true, connections to the load balancer are dropped + when all primary and all backup backend VMs are unhealthy.If + set to false, connections are distributed among all primary + VMs when all primary and all backup backend VMs are + unhealthy. For load balancers that have configurable + failover: `Internal TCP/UDP Load + Balancing `__ + and `external TCP/UDP Load + Balancing `__. 
+ The default is false. + + This field is a member of `oneof`_ ``_drop_traffic_if_unhealthy``. + failover_ratio (float): + The value of the field must be in the range [0, 1]. If the + value is 0, the load balancer performs a failover when the + number of healthy primary VMs equals zero. For all other + values, the load balancer performs a failover when the total + number of healthy primary VMs is less than this ratio. For + load balancers that have configurable failover: `Internal + TCP/UDP Load + Balancing `__ + and `external TCP/UDP Load + Balancing `__. + + This field is a member of `oneof`_ ``_failover_ratio``. + """ + + disable_connection_drain_on_failover: bool = proto.Field( + proto.BOOL, + number=182150753, + optional=True, + ) + drop_traffic_if_unhealthy: bool = proto.Field( + proto.BOOL, + number=112289428, + optional=True, + ) + failover_ratio: float = proto.Field( + proto.FLOAT, + number=212667006, + optional=True, + ) + + +class BackendServiceGroupHealth(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + annotations (MutableMapping[str, str]): + Metadata defined as annotations on the + network endpoint group. + health_status (MutableSequence[google.cloud.compute_v1.types.HealthStatus]): + Health state of the backend instances or + endpoints in requested instance or network + endpoint group, determined based on configured + health checks. + kind (str): + [Output Only] Type of resource. Always + compute#backendServiceGroupHealth for the health of backend + services. + + This field is a member of `oneof`_ ``_kind``. 
+ """ + + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=112032548, + ) + health_status: MutableSequence['HealthStatus'] = proto.RepeatedField( + proto.MESSAGE, + number=380545845, + message='HealthStatus', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + + +class BackendServiceIAP(proto.Message): + r"""Identity-Aware Proxy + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Whether the serving infrastructure will + authenticate and authorize all incoming + requests. If true, the oauth2ClientId and + oauth2ClientSecret fields must be non-empty. + + This field is a member of `oneof`_ ``_enabled``. + oauth2_client_id (str): + OAuth2 client ID to use for the + authentication flow. + + This field is a member of `oneof`_ ``_oauth2_client_id``. + oauth2_client_secret (str): + OAuth2 client secret to use for the + authentication flow. For security reasons, this + value cannot be retrieved via the API. Instead, + the SHA-256 hash of the value is returned in the + oauth2ClientSecretSha256 field. @InputOnly + + This field is a member of `oneof`_ ``_oauth2_client_secret``. + oauth2_client_secret_sha256 (str): + [Output Only] SHA256 hash value for the field + oauth2_client_secret above. + + This field is a member of `oneof`_ ``_oauth2_client_secret_sha256``. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1018689, + optional=True, + ) + oauth2_client_id: str = proto.Field( + proto.STRING, + number=314017611, + optional=True, + ) + oauth2_client_secret: str = proto.Field( + proto.STRING, + number=50999520, + optional=True, + ) + oauth2_client_secret_sha256: str = proto.Field( + proto.STRING, + number=112903782, + optional=True, + ) + + +class BackendServiceList(proto.Message): + r"""Contains a list of BackendService resources. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.BackendService]): + A list of BackendService resources. + kind (str): + [Output Only] Type of resource. Always + compute#backendServiceList for lists of backend services. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['BackendService'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='BackendService', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class BackendServiceLocalityLoadBalancingPolicyConfig(proto.Message): + r"""Container for either a built-in LB policy supported by gRPC + or Envoy or a custom one implemented by the end user. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + custom_policy (google.cloud.compute_v1.types.BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy): + + This field is a member of `oneof`_ ``_custom_policy``. + policy (google.cloud.compute_v1.types.BackendServiceLocalityLoadBalancingPolicyConfigPolicy): + + This field is a member of `oneof`_ ``_policy``. + """ + + custom_policy: 'BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy' = proto.Field( + proto.MESSAGE, + number=4818368, + optional=True, + message='BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy', + ) + policy: 'BackendServiceLocalityLoadBalancingPolicyConfigPolicy' = proto.Field( + proto.MESSAGE, + number=91071794, + optional=True, + message='BackendServiceLocalityLoadBalancingPolicyConfigPolicy', + ) + + +class BackendServiceLocalityLoadBalancingPolicyConfigCustomPolicy(proto.Message): + r"""The configuration for a custom policy implemented by the user + and deployed with the client. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data (str): + An optional, arbitrary JSON object with + configuration data, understood by a locally + installed custom policy implementation. + + This field is a member of `oneof`_ ``_data``. + name (str): + Identifies the custom policy. The value + should match the name of a custom implementation + registered on the gRPC clients. It should follow + protocol buffer message naming conventions and + include the full path (for example, + myorg.CustomLbPolicy). The maximum length is 256 + characters. Do not specify the same custom + policy more than once for a backend. If you do, + the configuration is rejected. For an example of + how to use this field, see Use a custom policy. + + This field is a member of `oneof`_ ``_name``. + """ + + data: str = proto.Field( + proto.STRING, + number=3076010, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + +class BackendServiceLocalityLoadBalancingPolicyConfigPolicy(proto.Message): + r"""The configuration for a built-in load balancing policy. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of a locality load-balancing policy. Valid values + include ROUND_ROBIN and, for Java clients, LEAST_REQUEST. + For information about these values, see the description of + localityLbPolicy. Do not specify the same policy more than + once for a backend. If you do, the configuration is + rejected. Check the Name enum for the list of possible + values. + + This field is a member of `oneof`_ ``_name``. + """ + class Name(proto.Enum): + r"""The name of a locality load-balancing policy. Valid values include + ROUND_ROBIN and, for Java clients, LEAST_REQUEST. For information + about these values, see the description of localityLbPolicy. 
Do not + specify the same policy more than once for a backend. If you do, the + configuration is rejected. + + Values: + UNDEFINED_NAME (0): + A value indicating that the enum field is not + set. + INVALID_LB_POLICY (323318707): + No description available. + LEAST_REQUEST (46604921): + An O(1) algorithm which selects two random + healthy hosts and picks the host which has fewer + active requests. + MAGLEV (119180266): + This algorithm implements consistent hashing + to backends. Maglev can be used as a drop in + replacement for the ring hash load balancer. + Maglev is not as stable as ring hash but has + faster table lookup build times and host + selection times. For more information about + Maglev, see + https://ai.google/research/pubs/pub44824 + ORIGINAL_DESTINATION (166297216): + Backend host is selected based on the client + connection metadata, i.e., connections are + opened to the same address as the destination + address of the incoming connection before the + connection was redirected to the load balancer. + RANDOM (262527171): + The load balancer selects a random healthy + host. + RING_HASH (432795069): + The ring/modulo hash load balancer implements + consistent hashing to backends. The algorithm + has the property that the addition/removal of a + host from a set of N hosts only affects 1/N of + the requests. + ROUND_ROBIN (153895801): + This is a simple policy in which each healthy + backend is selected in round robin order. This + is the default. + WEIGHTED_MAGLEV (254930962): + Per-instance weighted Load Balancing via health check + reported weights. If set, the Backend Service must configure + a non legacy HTTP-based Health Check, and health check + replies are expected to contain non-standard HTTP response + header field X-Load-Balancing-Endpoint-Weight to specify the + per-instance weights. 
If set, Load Balancing is weighted + based on the per-instance weights reported in the last + processed health check replies, as long as every instance + either reported a valid weight or had UNAVAILABLE_WEIGHT. + Otherwise, Load Balancing remains equal-weight. This option + is only supported in Network Load Balancing. + """ + UNDEFINED_NAME = 0 + INVALID_LB_POLICY = 323318707 + LEAST_REQUEST = 46604921 + MAGLEV = 119180266 + ORIGINAL_DESTINATION = 166297216 + RANDOM = 262527171 + RING_HASH = 432795069 + ROUND_ROBIN = 153895801 + WEIGHTED_MAGLEV = 254930962 + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + +class BackendServiceLogConfig(proto.Message): + r"""The available logging options for the load balancer traffic + served by this backend service. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable (bool): + Denotes whether to enable logging for the + load balancer traffic served by this backend + service. The default value is false. + + This field is a member of `oneof`_ ``_enable``. + optional_fields (MutableSequence[str]): + This field can only be specified if logging + is enabled for this backend service and + "logConfig.optionalMode" was set to CUSTOM. + Contains a list of optional fields you want to + include in the logs. For example: + serverInstance, serverGkeDetails.cluster, + serverGkeDetails.pod.podNamespace + optional_mode (str): + This field can only be specified if logging is enabled for + this backend service. Configures whether all, none or a + subset of optional fields should be added to the reported + logs. One of [INCLUDE_ALL_OPTIONAL, EXCLUDE_ALL_OPTIONAL, + CUSTOM]. Default is EXCLUDE_ALL_OPTIONAL. Check the + OptionalMode enum for the list of possible values. + + This field is a member of `oneof`_ ``_optional_mode``. 
+ sample_rate (float): + This field can only be specified if logging is enabled for + this backend service. The value of the field must be in [0, + 1]. This configures the sampling rate of requests to the + load balancer where 1.0 means all logged requests are + reported and 0.0 means no logged requests are reported. The + default value is 1.0. + + This field is a member of `oneof`_ ``_sample_rate``. + """ + class OptionalMode(proto.Enum): + r"""This field can only be specified if logging is enabled for this + backend service. Configures whether all, none or a subset of + optional fields should be added to the reported logs. One of + [INCLUDE_ALL_OPTIONAL, EXCLUDE_ALL_OPTIONAL, CUSTOM]. Default is + EXCLUDE_ALL_OPTIONAL. + + Values: + UNDEFINED_OPTIONAL_MODE (0): + A value indicating that the enum field is not + set. + CUSTOM (388595569): + A subset of optional fields. + EXCLUDE_ALL_OPTIONAL (168636099): + None optional fields. + INCLUDE_ALL_OPTIONAL (535606965): + All optional fields. + """ + UNDEFINED_OPTIONAL_MODE = 0 + CUSTOM = 388595569 + EXCLUDE_ALL_OPTIONAL = 168636099 + INCLUDE_ALL_OPTIONAL = 535606965 + + enable: bool = proto.Field( + proto.BOOL, + number=311764355, + optional=True, + ) + optional_fields: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=528589944, + ) + optional_mode: str = proto.Field( + proto.STRING, + number=128697122, + optional=True, + ) + sample_rate: float = proto.Field( + proto.FLOAT, + number=153193045, + optional=True, + ) + + +class BackendServiceReference(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + + This field is a member of `oneof`_ ``_backend_service``. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + optional=True, + ) + + +class BackendServicesScopedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_services (MutableSequence[google.cloud.compute_v1.types.BackendService]): + A list of BackendServices contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + backend_services: MutableSequence['BackendService'] = proto.RepeatedField( + proto.MESSAGE, + number=388522409, + message='BackendService', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class BfdPacket(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + authentication_present (bool): + The Authentication Present bit of the BFD + packet. This is specified in section 4.1 of + RFC5880 + + This field is a member of `oneof`_ ``_authentication_present``. + control_plane_independent (bool): + The Control Plane Independent bit of the BFD + packet. This is specified in section 4.1 of + RFC5880 + + This field is a member of `oneof`_ ``_control_plane_independent``. + demand (bool): + The demand bit of the BFD packet. This is + specified in section 4.1 of RFC5880 + + This field is a member of `oneof`_ ``_demand``. + diagnostic (str): + The diagnostic code specifies the local + system's reason for the last change in session + state. This allows remote systems to determine + the reason that the previous session failed, for + example. These diagnostic codes are specified in + section 4.1 of RFC5880 Check the Diagnostic enum + for the list of possible values. + + This field is a member of `oneof`_ ``_diagnostic``. + final (bool): + The Final bit of the BFD packet. 
This is + specified in section 4.1 of RFC5880 + + This field is a member of `oneof`_ ``_final``. + length (int): + The length of the BFD Control packet in + bytes. This is specified in section 4.1 of + RFC5880 + + This field is a member of `oneof`_ ``_length``. + min_echo_rx_interval_ms (int): + The Required Min Echo RX Interval value in + the BFD packet. This is specified in section 4.1 + of RFC5880 + + This field is a member of `oneof`_ ``_min_echo_rx_interval_ms``. + min_rx_interval_ms (int): + The Required Min RX Interval value in the BFD + packet. This is specified in section 4.1 of + RFC5880 + + This field is a member of `oneof`_ ``_min_rx_interval_ms``. + min_tx_interval_ms (int): + The Desired Min TX Interval value in the BFD + packet. This is specified in section 4.1 of + RFC5880 + + This field is a member of `oneof`_ ``_min_tx_interval_ms``. + multiplier (int): + The detection time multiplier of the BFD + packet. This is specified in section 4.1 of + RFC5880 + + This field is a member of `oneof`_ ``_multiplier``. + multipoint (bool): + The multipoint bit of the BFD packet. This is + specified in section 4.1 of RFC5880 + + This field is a member of `oneof`_ ``_multipoint``. + my_discriminator (int): + The My Discriminator value in the BFD packet. + This is specified in section 4.1 of RFC5880 + + This field is a member of `oneof`_ ``_my_discriminator``. + poll (bool): + The Poll bit of the BFD packet. This is + specified in section 4.1 of RFC5880 + + This field is a member of `oneof`_ ``_poll``. + state (str): + The current BFD session state as seen by the + transmitting system. These states are specified + in section 4.1 of RFC5880 Check the State enum + for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + version (int): + The version number of the BFD protocol, as + specified in section 4.1 of RFC5880. + + This field is a member of `oneof`_ ``_version``. 
+ your_discriminator (int): + The Your Discriminator value in the BFD + packet. This is specified in section 4.1 of + RFC5880 + + This field is a member of `oneof`_ ``_your_discriminator``. + """ + class Diagnostic(proto.Enum): + r"""The diagnostic code specifies the local system's reason for + the last change in session state. This allows remote systems to + determine the reason that the previous session failed, for + example. These diagnostic codes are specified in section 4.1 of + RFC5880 + + Values: + UNDEFINED_DIAGNOSTIC (0): + A value indicating that the enum field is not + set. + ADMINISTRATIVELY_DOWN (121685798): + No description available. + CONCATENATED_PATH_DOWN (26186892): + No description available. + CONTROL_DETECTION_TIME_EXPIRED (135478383): + No description available. + DIAGNOSTIC_UNSPECIFIED (58720895): + No description available. + ECHO_FUNCTION_FAILED (220687466): + No description available. + FORWARDING_PLANE_RESET (19715882): + No description available. + NEIGHBOR_SIGNALED_SESSION_DOWN (374226742): + No description available. + NO_DIAGNOSTIC (222503141): + No description available. + PATH_DOWN (290605180): + No description available. + REVERSE_CONCATENATED_PATH_DOWN (479337129): + No description available. + """ + UNDEFINED_DIAGNOSTIC = 0 + ADMINISTRATIVELY_DOWN = 121685798 + CONCATENATED_PATH_DOWN = 26186892 + CONTROL_DETECTION_TIME_EXPIRED = 135478383 + DIAGNOSTIC_UNSPECIFIED = 58720895 + ECHO_FUNCTION_FAILED = 220687466 + FORWARDING_PLANE_RESET = 19715882 + NEIGHBOR_SIGNALED_SESSION_DOWN = 374226742 + NO_DIAGNOSTIC = 222503141 + PATH_DOWN = 290605180 + REVERSE_CONCATENATED_PATH_DOWN = 479337129 + + class State(proto.Enum): + r"""The current BFD session state as seen by the transmitting + system. These states are specified in section 4.1 of RFC5880 + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ADMIN_DOWN (128544690): + No description available. + DOWN (2104482): + No description available. 
+ INIT (2252048): + No description available. + STATE_UNSPECIFIED (470755401): + No description available. + UP (2715): + No description available. + """ + UNDEFINED_STATE = 0 + ADMIN_DOWN = 128544690 + DOWN = 2104482 + INIT = 2252048 + STATE_UNSPECIFIED = 470755401 + UP = 2715 + + authentication_present: bool = proto.Field( + proto.BOOL, + number=105974260, + optional=True, + ) + control_plane_independent: bool = proto.Field( + proto.BOOL, + number=62363573, + optional=True, + ) + demand: bool = proto.Field( + proto.BOOL, + number=275180107, + optional=True, + ) + diagnostic: str = proto.Field( + proto.STRING, + number=62708647, + optional=True, + ) + final: bool = proto.Field( + proto.BOOL, + number=97436022, + optional=True, + ) + length: int = proto.Field( + proto.UINT32, + number=504249062, + optional=True, + ) + min_echo_rx_interval_ms: int = proto.Field( + proto.UINT32, + number=97286868, + optional=True, + ) + min_rx_interval_ms: int = proto.Field( + proto.UINT32, + number=463399028, + optional=True, + ) + min_tx_interval_ms: int = proto.Field( + proto.UINT32, + number=526023602, + optional=True, + ) + multiplier: int = proto.Field( + proto.UINT32, + number=191331777, + optional=True, + ) + multipoint: bool = proto.Field( + proto.BOOL, + number=191421431, + optional=True, + ) + my_discriminator: int = proto.Field( + proto.UINT32, + number=76663113, + optional=True, + ) + poll: bool = proto.Field( + proto.BOOL, + number=3446719, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + version: int = proto.Field( + proto.UINT32, + number=351608024, + optional=True, + ) + your_discriminator: int = proto.Field( + proto.UINT32, + number=515634064, + optional=True, + ) + + +class BfdStatus(proto.Message): + r"""Next free: 15 + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bfd_session_initialization_mode (str): + The BFD session initialization mode for this + BGP peer. If set to ACTIVE, the Cloud Router + will initiate the BFD session for this BGP peer. + If set to PASSIVE, the Cloud Router will wait + for the peer router to initiate the BFD session + for this BGP peer. If set to DISABLED, BFD is + disabled for this BGP peer. Check the + BfdSessionInitializationMode enum for the list + of possible values. + + This field is a member of `oneof`_ ``_bfd_session_initialization_mode``. + config_update_timestamp_micros (int): + Unix timestamp of the most recent config + update. + + This field is a member of `oneof`_ ``_config_update_timestamp_micros``. + control_packet_counts (google.cloud.compute_v1.types.BfdStatusPacketCounts): + Control packet counts for the current BFD + session. + + This field is a member of `oneof`_ ``_control_packet_counts``. + control_packet_intervals (MutableSequence[google.cloud.compute_v1.types.PacketIntervals]): + Inter-packet time interval statistics for + control packets. + local_diagnostic (str): + The diagnostic code specifies the local + system's reason for the last change in session + state. This allows remote systems to determine + the reason that the previous session failed, for + example. These diagnostic codes are specified in + section 4.1 of RFC5880 Check the LocalDiagnostic + enum for the list of possible values. + + This field is a member of `oneof`_ ``_local_diagnostic``. + local_state (str): + The current BFD session state as seen by the + transmitting system. These states are specified + in section 4.1 of RFC5880 Check the LocalState + enum for the list of possible values. + + This field is a member of `oneof`_ ``_local_state``. + negotiated_local_control_tx_interval_ms (int): + Negotiated transmit interval for control + packets. 
+ + This field is a member of `oneof`_ ``_negotiated_local_control_tx_interval_ms``. + rx_packet (google.cloud.compute_v1.types.BfdPacket): + The most recent Rx control packet for this + BFD session. + + This field is a member of `oneof`_ ``_rx_packet``. + tx_packet (google.cloud.compute_v1.types.BfdPacket): + The most recent Tx control packet for this + BFD session. + + This field is a member of `oneof`_ ``_tx_packet``. + uptime_ms (int): + Session uptime in milliseconds. Value will be + 0 if session is not up. + + This field is a member of `oneof`_ ``_uptime_ms``. + """ + class BfdSessionInitializationMode(proto.Enum): + r"""The BFD session initialization mode for this BGP peer. If set + to ACTIVE, the Cloud Router will initiate the BFD session for + this BGP peer. If set to PASSIVE, the Cloud Router will wait for + the peer router to initiate the BFD session for this BGP peer. + If set to DISABLED, BFD is disabled for this BGP peer. + + Values: + UNDEFINED_BFD_SESSION_INITIALIZATION_MODE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + No description available. + DISABLED (516696700): + No description available. + PASSIVE (462813959): + No description available. + """ + UNDEFINED_BFD_SESSION_INITIALIZATION_MODE = 0 + ACTIVE = 314733318 + DISABLED = 516696700 + PASSIVE = 462813959 + + class LocalDiagnostic(proto.Enum): + r"""The diagnostic code specifies the local system's reason for + the last change in session state. This allows remote systems to + determine the reason that the previous session failed, for + example. These diagnostic codes are specified in section 4.1 of + RFC5880 + + Values: + UNDEFINED_LOCAL_DIAGNOSTIC (0): + A value indicating that the enum field is not + set. + ADMINISTRATIVELY_DOWN (121685798): + No description available. + CONCATENATED_PATH_DOWN (26186892): + No description available. + CONTROL_DETECTION_TIME_EXPIRED (135478383): + No description available. 
+ DIAGNOSTIC_UNSPECIFIED (58720895): + No description available. + ECHO_FUNCTION_FAILED (220687466): + No description available. + FORWARDING_PLANE_RESET (19715882): + No description available. + NEIGHBOR_SIGNALED_SESSION_DOWN (374226742): + No description available. + NO_DIAGNOSTIC (222503141): + No description available. + PATH_DOWN (290605180): + No description available. + REVERSE_CONCATENATED_PATH_DOWN (479337129): + No description available. + """ + UNDEFINED_LOCAL_DIAGNOSTIC = 0 + ADMINISTRATIVELY_DOWN = 121685798 + CONCATENATED_PATH_DOWN = 26186892 + CONTROL_DETECTION_TIME_EXPIRED = 135478383 + DIAGNOSTIC_UNSPECIFIED = 58720895 + ECHO_FUNCTION_FAILED = 220687466 + FORWARDING_PLANE_RESET = 19715882 + NEIGHBOR_SIGNALED_SESSION_DOWN = 374226742 + NO_DIAGNOSTIC = 222503141 + PATH_DOWN = 290605180 + REVERSE_CONCATENATED_PATH_DOWN = 479337129 + + class LocalState(proto.Enum): + r"""The current BFD session state as seen by the transmitting + system. These states are specified in section 4.1 of RFC5880 + + Values: + UNDEFINED_LOCAL_STATE (0): + A value indicating that the enum field is not + set. + ADMIN_DOWN (128544690): + No description available. + DOWN (2104482): + No description available. + INIT (2252048): + No description available. + STATE_UNSPECIFIED (470755401): + No description available. + UP (2715): + No description available. 
+ """ + UNDEFINED_LOCAL_STATE = 0 + ADMIN_DOWN = 128544690 + DOWN = 2104482 + INIT = 2252048 + STATE_UNSPECIFIED = 470755401 + UP = 2715 + + bfd_session_initialization_mode: str = proto.Field( + proto.STRING, + number=218156954, + optional=True, + ) + config_update_timestamp_micros: int = proto.Field( + proto.INT64, + number=457195569, + optional=True, + ) + control_packet_counts: 'BfdStatusPacketCounts' = proto.Field( + proto.MESSAGE, + number=132573561, + optional=True, + message='BfdStatusPacketCounts', + ) + control_packet_intervals: MutableSequence['PacketIntervals'] = proto.RepeatedField( + proto.MESSAGE, + number=500806649, + message='PacketIntervals', + ) + local_diagnostic: str = proto.Field( + proto.STRING, + number=463737083, + optional=True, + ) + local_state: str = proto.Field( + proto.STRING, + number=149195453, + optional=True, + ) + negotiated_local_control_tx_interval_ms: int = proto.Field( + proto.UINT32, + number=21768340, + optional=True, + ) + rx_packet: 'BfdPacket' = proto.Field( + proto.MESSAGE, + number=505069729, + optional=True, + message='BfdPacket', + ) + tx_packet: 'BfdPacket' = proto.Field( + proto.MESSAGE, + number=111386275, + optional=True, + message='BfdPacket', + ) + uptime_ms: int = proto.Field( + proto.INT64, + number=125398365, + optional=True, + ) + + +class BfdStatusPacketCounts(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + num_rx (int): + Number of packets received since the + beginning of the current BFD session. + + This field is a member of `oneof`_ ``_num_rx``. + num_rx_rejected (int): + Number of packets received that were rejected + because of errors since the beginning of the + current BFD session. + + This field is a member of `oneof`_ ``_num_rx_rejected``. + num_rx_successful (int): + Number of packets received that were + successfully processed since the beginning of + the current BFD session. 
+ + This field is a member of `oneof`_ ``_num_rx_successful``. + num_tx (int): + Number of packets transmitted since the + beginning of the current BFD session. + + This field is a member of `oneof`_ ``_num_tx``. + """ + + num_rx: int = proto.Field( + proto.UINT32, + number=39375263, + optional=True, + ) + num_rx_rejected: int = proto.Field( + proto.UINT32, + number=281007902, + optional=True, + ) + num_rx_successful: int = proto.Field( + proto.UINT32, + number=455361850, + optional=True, + ) + num_tx: int = proto.Field( + proto.UINT32, + number=39375325, + optional=True, + ) + + +class Binding(proto.Message): + r"""Associates ``members``, or principals, with a ``role``. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + binding_id (str): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_binding_id``. + condition (google.cloud.compute_v1.types.Expr): + The condition that is associated with this binding. If the + condition evaluates to ``true``, then this binding applies + to the current request. If the condition evaluates to + ``false``, then this binding does not apply to the current + request. However, a different role binding might grant the + same role to one or more of the principals in this binding. + To learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + + This field is a member of `oneof`_ ``_condition``. + members (MutableSequence[str]): + Specifies the principals requesting access for a Google + Cloud resource. ``members`` can have the following values: + \* ``allUsers``: A special identifier that represents anyone + who is on the internet; with or without a Google account. \* + ``allAuthenticatedUsers``: A special identifier that + represents anyone who is authenticated with a Google account + or a service account. 
Does not include identities that come + from external identity providers (IdPs) through identity + federation. \* ``user:{emailid}``: An email address that + represents a specific Google account. For example, + ``alice@example.com`` . \* ``serviceAccount:{emailid}``: An + email address that represents a Google service account. For + example, ``my-other-app@appspot.gserviceaccount.com``. \* + ``serviceAccount:{projectid}.svc.id.goog[{namespace}/{kubernetes-sa}]``: + An identifier for a `Kubernetes service + account `__. + For example, + ``my-project.svc.id.goog[my-namespace/my-kubernetes-sa]``. + \* ``group:{emailid}``: An email address that represents a + Google group. For example, ``admins@example.com``. \* + ``domain:{domain}``: The G Suite domain (primary) that + represents all the users of that domain. For example, + ``google.com`` or ``example.com``. \* + ``deleted:user:{emailid}?uid={uniqueid}``: An email address + (plus unique identifier) representing a user that has been + recently deleted. For example, + ``alice@example.com?uid=123456789012345678901``. If the user + is recovered, this value reverts to ``user:{emailid}`` and + the recovered user retains the role in the binding. \* + ``deleted:serviceAccount:{emailid}?uid={uniqueid}``: An + email address (plus unique identifier) representing a + service account that has been recently deleted. For example, + ``my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901``. + If the service account is undeleted, this value reverts to + ``serviceAccount:{emailid}`` and the undeleted service + account retains the role in the binding. \* + ``deleted:group:{emailid}?uid={uniqueid}``: An email address + (plus unique identifier) representing a Google group that + has been recently deleted. For example, + ``admins@example.com?uid=123456789012345678901``. If the + group is recovered, this value reverts to + ``group:{emailid}`` and the recovered group retains the role + in the binding. 
+ role (str): + Role that is assigned to the list of ``members``, or + principals. For example, ``roles/viewer``, ``roles/editor``, + or ``roles/owner``. + + This field is a member of `oneof`_ ``_role``. + """ + + binding_id: str = proto.Field( + proto.STRING, + number=441088277, + optional=True, + ) + condition: 'Expr' = proto.Field( + proto.MESSAGE, + number=212430107, + optional=True, + message='Expr', + ) + members: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=412010777, + ) + role: str = proto.Field( + proto.STRING, + number=3506294, + optional=True, + ) + + +class BulkInsertDiskRequest(proto.Message): + r"""A request message for Disks.BulkInsert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bulk_insert_disk_resource_resource (google.cloud.compute_v1.types.BulkInsertDiskResource): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + bulk_insert_disk_resource_resource: 'BulkInsertDiskResource' = proto.Field( + proto.MESSAGE, + number=289799382, + message='BulkInsertDiskResource', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class BulkInsertDiskResource(proto.Message): + r"""A transient resource used in compute.disks.bulkInsert and + compute.regionDisks.bulkInsert. It is only used to process + requests and is not persisted. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_consistency_group_policy (str): + The URL of the DiskConsistencyGroupPolicy for + the group of disks to clone. This may be a full + or partial URL, such as: - + https://www.googleapis.com/compute/v1/projects/project/regions/region + /resourcePolicies/resourcePolicy - + projects/project/regions/region/resourcePolicies/resourcePolicy + - regions/region/resourcePolicies/resourcePolicy + + This field is a member of `oneof`_ ``_source_consistency_group_policy``. + """ + + source_consistency_group_policy: str = proto.Field( + proto.STRING, + number=19616093, + optional=True, + ) + + +class BulkInsertInstanceRequest(proto.Message): + r"""A request message for Instances.BulkInsert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bulk_insert_instance_resource_resource (google.cloud.compute_v1.types.BulkInsertInstanceResource): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
+ For example, consider a situation
+ where you make an initial request and the
+ request times out. If you make the request again
+ with the same request ID, the server can check
+ if original operation with the same request ID
+ was received, and if so, will ignore the second
+ request. This prevents clients from accidentally
+ creating duplicate commitments. The request ID
+ must be a valid UUID with the exception that
+ zero UUID is not supported (
+ 00000000-0000-0000-0000-000000000000).
+
+ This field is a member of `oneof`_ ``_request_id``.
+ zone (str):
+ The name of the zone for this request.
+ """
+
+ bulk_insert_instance_resource_resource: 'BulkInsertInstanceResource' = proto.Field(
+ proto.MESSAGE,
+ number=41427278,
+ message='BulkInsertInstanceResource',
+ )
+ project: str = proto.Field(
+ proto.STRING,
+ number=227560217,
+ )
+ request_id: str = proto.Field(
+ proto.STRING,
+ number=37109963,
+ optional=True,
+ )
+ zone: str = proto.Field(
+ proto.STRING,
+ number=3744684,
+ )
+
+
+class BulkInsertInstanceResource(proto.Message):
+ r"""A transient resource used in compute.instances.bulkInsert and
+ compute.regionInstances.bulkInsert . This resource is not
+ persisted anywhere, it is used only for processing the requests.
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ count (int):
+ The maximum number of instances to create.
+
+ This field is a member of `oneof`_ ``_count``.
+ instance_properties (google.cloud.compute_v1.types.InstanceProperties):
+ The instance properties defining the VM
+ instances to be created. Required if
+ sourceInstanceTemplate is not provided.
+
+ This field is a member of `oneof`_ ``_instance_properties``.
+ location_policy (google.cloud.compute_v1.types.LocationPolicy):
+ Policy for choosing target zone. For more
+ information, see Create VMs in bulk .
+
+ This field is a member of `oneof`_ ``_location_policy``.
+ min_count (int): + The minimum number of instances to create. If no min_count + is specified then count is used as the default value. If + min_count instances cannot be created, then no instances + will be created and instances already created will be + deleted. + + This field is a member of `oneof`_ ``_min_count``. + name_pattern (str): + The string pattern used for the names of the VMs. Either + name_pattern or per_instance_properties must be set. The + pattern must contain one continuous sequence of placeholder + hash characters (#) with each character corresponding to one + digit of the generated instance name. Example: a + name_pattern of inst-#### generates instance names such as + inst-0001 and inst-0002. If existing instances in the same + project and zone have names that match the name pattern then + the generated instance numbers start after the biggest + existing number. For example, if there exists an instance + with name inst-0050, then instance names generated using the + pattern inst-#### begin with inst-0051. The name pattern + placeholder #...# can contain up to 18 characters. + + This field is a member of `oneof`_ ``_name_pattern``. + per_instance_properties (MutableMapping[str, google.cloud.compute_v1.types.BulkInsertInstanceResourcePerInstanceProperties]): + Per-instance properties to be set on individual instances. + Keys of this map specify requested instance names. Can be + empty if name_pattern is used. + source_instance_template (str): + Specifies the instance template from which to + create instances. You may combine + sourceInstanceTemplate with instanceProperties + to override specific values from an existing + instance template. Bulk API follows the + semantics of JSON Merge Patch described by RFC + 7396. It can be a full or partial URL. 
For + example, the following are all valid URLs to an + instance template: - + https://www.googleapis.com/compute/v1/projects/project + /global/instanceTemplates/instanceTemplate - + projects/project/global/instanceTemplates/instanceTemplate + - global/instanceTemplates/instanceTemplate This + field is optional. + + This field is a member of `oneof`_ ``_source_instance_template``. + """ + + count: int = proto.Field( + proto.INT64, + number=94851343, + optional=True, + ) + instance_properties: 'InstanceProperties' = proto.Field( + proto.MESSAGE, + number=215355165, + optional=True, + message='InstanceProperties', + ) + location_policy: 'LocationPolicy' = proto.Field( + proto.MESSAGE, + number=465689852, + optional=True, + message='LocationPolicy', + ) + min_count: int = proto.Field( + proto.INT64, + number=523228386, + optional=True, + ) + name_pattern: str = proto.Field( + proto.STRING, + number=413815260, + optional=True, + ) + per_instance_properties: MutableMapping[str, 'BulkInsertInstanceResourcePerInstanceProperties'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=108502267, + message='BulkInsertInstanceResourcePerInstanceProperties', + ) + source_instance_template: str = proto.Field( + proto.STRING, + number=332423616, + optional=True, + ) + + +class BulkInsertInstanceResourcePerInstanceProperties(proto.Message): + r"""Per-instance properties to be set on individual instances. To + be extended in the future. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + This field is only temporary. It will be + removed. Do not use it. + + This field is a member of `oneof`_ ``_name``. + """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + +class BulkInsertRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.BulkInsert. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bulk_insert_disk_resource_resource (google.cloud.compute_v1.types.BulkInsertDiskResource): + The body resource for this request + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + bulk_insert_disk_resource_resource: 'BulkInsertDiskResource' = proto.Field( + proto.MESSAGE, + number=289799382, + message='BulkInsertDiskResource', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class BulkInsertRegionInstanceRequest(proto.Message): + r"""A request message for RegionInstances.BulkInsert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bulk_insert_instance_resource_resource (google.cloud.compute_v1.types.BulkInsertInstanceResource): + The body resource for this request + project (str): + Project ID for this request. 
+ region (str): + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + bulk_insert_instance_resource_resource: 'BulkInsertInstanceResource' = proto.Field( + proto.MESSAGE, + number=41427278, + message='BulkInsertInstanceResource', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class CacheInvalidationRule(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + If set, this invalidation rule will only + apply to requests with a Host header matching + host. + + This field is a member of `oneof`_ ``_host``. + path (str): + + This field is a member of `oneof`_ ``_path``. + """ + + host: str = proto.Field( + proto.STRING, + number=3208616, + optional=True, + ) + path: str = proto.Field( + proto.STRING, + number=3433509, + optional=True, + ) + + +class CacheKeyPolicy(proto.Message): + r"""Message containing what to include in the cache key for a + request for Cloud CDN. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_host (bool): + If true, requests to different hosts will be + cached separately. + + This field is a member of `oneof`_ ``_include_host``. + include_http_headers (MutableSequence[str]): + Allows HTTP request headers (by name) to be + used in the cache key. + include_named_cookies (MutableSequence[str]): + Allows HTTP cookies (by name) to be used in + the cache key. The name=value pair will be used + in the cache key Cloud CDN generates. + include_protocol (bool): + If true, http and https requests will be + cached separately. + + This field is a member of `oneof`_ ``_include_protocol``. + include_query_string (bool): + If true, include query string parameters in the cache key + according to query_string_whitelist and + query_string_blacklist. If neither is set, the entire query + string will be included. If false, the query string will be + excluded from the cache key entirely. + + This field is a member of `oneof`_ ``_include_query_string``. + query_string_blacklist (MutableSequence[str]): + Names of query string parameters to exclude in cache keys. + All other parameters will be included. Either specify + query_string_whitelist or query_string_blacklist, not both. + '&' and '=' will be percent encoded and not treated as + delimiters. + query_string_whitelist (MutableSequence[str]): + Names of query string parameters to include in cache keys. + All other parameters will be excluded. Either specify + query_string_whitelist or query_string_blacklist, not both. + '&' and '=' will be percent encoded and not treated as + delimiters. 
+ """ + + include_host: bool = proto.Field( + proto.BOOL, + number=486867679, + optional=True, + ) + include_http_headers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2489606, + ) + include_named_cookies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=87316530, + ) + include_protocol: bool = proto.Field( + proto.BOOL, + number=303507535, + optional=True, + ) + include_query_string: bool = proto.Field( + proto.BOOL, + number=474036639, + optional=True, + ) + query_string_blacklist: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=354964742, + ) + query_string_whitelist: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=52456496, + ) + + +class CircuitBreakers(proto.Message): + r"""Settings controlling the volume of requests, connections and + retries to this backend service. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_connections (int): + The maximum number of connections to the + backend service. If not specified, there is no + limit. Not supported when the backend service is + referenced by a URL map that is bound to target + gRPC proxy that has validateForProxyless field + set to true. + + This field is a member of `oneof`_ ``_max_connections``. + max_pending_requests (int): + The maximum number of pending requests + allowed to the backend service. If not + specified, there is no limit. Not supported when + the backend service is referenced by a URL map + that is bound to target gRPC proxy that has + validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_max_pending_requests``. + max_requests (int): + The maximum number of parallel requests that + allowed to the backend service. If not + specified, there is no limit. + + This field is a member of `oneof`_ ``_max_requests``. 
+ max_requests_per_connection (int): + Maximum requests for a single connection to + the backend service. This parameter is respected + by both the HTTP/1.1 and HTTP/2 implementations. + If not specified, there is no limit. Setting + this parameter to 1 will effectively disable + keep alive. Not supported when the backend + service is referenced by a URL map that is bound + to target gRPC proxy that has + validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_max_requests_per_connection``. + max_retries (int): + The maximum number of parallel retries + allowed to the backend cluster. If not + specified, the default is 1. Not supported when + the backend service is referenced by a URL map + that is bound to target gRPC proxy that has + validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_max_retries``. + """ + + max_connections: int = proto.Field( + proto.INT32, + number=110652154, + optional=True, + ) + max_pending_requests: int = proto.Field( + proto.INT32, + number=375558887, + optional=True, + ) + max_requests: int = proto.Field( + proto.INT32, + number=28097599, + optional=True, + ) + max_requests_per_connection: int = proto.Field( + proto.INT32, + number=361630528, + optional=True, + ) + max_retries: int = proto.Field( + proto.INT32, + number=55546219, + optional=True, + ) + + +class CloneRulesFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.CloneRules. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + source_firewall_policy (str): + The firewall policy from which to copy rules. + + This field is a member of `oneof`_ ``_source_firewall_policy``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_firewall_policy: str = proto.Field( + proto.STRING, + number=25013549, + optional=True, + ) + + +class CloneRulesNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.CloneRules. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + source_firewall_policy (str): + The firewall policy from which to copy rules. + + This field is a member of `oneof`_ ``_source_firewall_policy``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_firewall_policy: str = proto.Field( + proto.STRING, + number=25013549, + optional=True, + ) + + +class CloneRulesRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.CloneRules. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ source_firewall_policy (str): + The firewall policy from which to copy rules. + + This field is a member of `oneof`_ ``_source_firewall_policy``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_firewall_policy: str = proto.Field( + proto.STRING, + number=25013549, + optional=True, + ) + + +class Commitment(proto.Message): + r"""Represents a regional Commitment resource. Creating a + commitment resource means that you are purchasing a committed + use contract with an explicit start and end time. You can create + commitments based on vCPUs and memory usage and receive + discounted rates. For full details, read Signing Up for + Committed Use Discounts. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_renew (bool): + Specifies whether to enable automatic renewal + for the commitment. The default value is false + if not specified. The field can be updated until + the day of the commitment expiration at 12:00am + PST. If the field is set to true, the commitment + will be automatically renewed for either one or + three years according to the terms of the + existing commitment. + + This field is a member of `oneof`_ ``_auto_renew``. + category (str): + The category of the commitment. Category + MACHINE specifies commitments composed of + machine resources such as VCPU or MEMORY, listed + in resources. Category LICENSE specifies + commitments composed of software licenses, + listed in licenseResources. Note that only + MACHINE commitments should have a Type + specified. Check the Category enum for the list + of possible values. + + This field is a member of `oneof`_ ``_category``. 
+ creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + end_timestamp (str): + [Output Only] Commitment end time in RFC3339 text format. + + This field is a member of `oneof`_ ``_end_timestamp``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#commitment for commitments. + + This field is a member of `oneof`_ ``_kind``. + license_resource (google.cloud.compute_v1.types.LicenseResourceCommitment): + The license specification required as part of + a license commitment. + + This field is a member of `oneof`_ ``_license_resource``. + merge_source_commitments (MutableSequence[str]): + List of source commitments to be merged into + a new commitment. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + plan (str): + The plan for this commitment, which determines duration and + discount rate. The currently supported plans are + TWELVE_MONTH (1 year), and THIRTY_SIX_MONTH (3 years). Check + the Plan enum for the list of possible values. + + This field is a member of `oneof`_ ``_plan``. 
+ region (str): + [Output Only] URL of the region where this commitment may be + used. + + This field is a member of `oneof`_ ``_region``. + reservations (MutableSequence[google.cloud.compute_v1.types.Reservation]): + List of reservations in this commitment. + resources (MutableSequence[google.cloud.compute_v1.types.ResourceCommitment]): + A list of commitment amounts for particular + resources. Note that VCPU and MEMORY resource + commitments must occur together. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + split_source_commitment (str): + Source commitment to be split into a new + commitment. + + This field is a member of `oneof`_ ``_split_source_commitment``. + start_timestamp (str): + [Output Only] Commitment start time in RFC3339 text format. + + This field is a member of `oneof`_ ``_start_timestamp``. + status (str): + [Output Only] Status of the commitment with regards to + eventual expiration (each commitment has an end date + defined). One of the following values: NOT_YET_ACTIVE, + ACTIVE, EXPIRED. Check the Status enum for the list of + possible values. + + This field is a member of `oneof`_ ``_status``. + status_message (str): + [Output Only] An optional, human-readable explanation of the + status. + + This field is a member of `oneof`_ ``_status_message``. + type_ (str): + The type of commitment, which affects the discount rate and + the eligible resources. Type MEMORY_OPTIMIZED specifies a + commitment that will only apply to memory optimized + machines. Type ACCELERATOR_OPTIMIZED specifies a commitment + that will only apply to accelerator optimized machines. + Check the Type enum for the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Category(proto.Enum): + r"""The category of the commitment. Category MACHINE specifies + commitments composed of machine resources such as VCPU or + MEMORY, listed in resources. 
Category LICENSE specifies + commitments composed of software licenses, listed in + licenseResources. Note that only MACHINE commitments should have + a Type specified. + + Values: + UNDEFINED_CATEGORY (0): + A value indicating that the enum field is not + set. + CATEGORY_UNSPECIFIED (509189462): + No description available. + LICENSE (347869217): + No description available. + MACHINE (469553191): + No description available. + """ + UNDEFINED_CATEGORY = 0 + CATEGORY_UNSPECIFIED = 509189462 + LICENSE = 347869217 + MACHINE = 469553191 + + class Plan(proto.Enum): + r"""The plan for this commitment, which determines duration and discount + rate. The currently supported plans are TWELVE_MONTH (1 year), and + THIRTY_SIX_MONTH (3 years). + + Values: + UNDEFINED_PLAN (0): + A value indicating that the enum field is not + set. + INVALID (530283991): + No description available. + THIRTY_SIX_MONTH (266295942): + No description available. + TWELVE_MONTH (173083962): + No description available. + """ + UNDEFINED_PLAN = 0 + INVALID = 530283991 + THIRTY_SIX_MONTH = 266295942 + TWELVE_MONTH = 173083962 + + class Status(proto.Enum): + r"""[Output Only] Status of the commitment with regards to eventual + expiration (each commitment has an end date defined). One of the + following values: NOT_YET_ACTIVE, ACTIVE, EXPIRED. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + No description available. + CANCELLED (41957681): + Deprecate CANCELED status. Will use separate + status to differentiate cancel by mergeCud or + manual cancellation. + CREATING (455564985): + No description available. + EXPIRED (482489093): + No description available. + NOT_YET_ACTIVE (20607337): + No description available. 
+ """ + UNDEFINED_STATUS = 0 + ACTIVE = 314733318 + CANCELLED = 41957681 + CREATING = 455564985 + EXPIRED = 482489093 + NOT_YET_ACTIVE = 20607337 + + class Type(proto.Enum): + r"""The type of commitment, which affects the discount rate and the + eligible resources. Type MEMORY_OPTIMIZED specifies a commitment + that will only apply to memory optimized machines. Type + ACCELERATOR_OPTIMIZED specifies a commitment that will only apply to + accelerator optimized machines. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + ACCELERATOR_OPTIMIZED (280848403): + No description available. + COMPUTE_OPTIMIZED (158349023): + No description available. + COMPUTE_OPTIMIZED_C2D (383246453): + No description available. + COMPUTE_OPTIMIZED_C3 (428004784): + No description available. + GENERAL_PURPOSE (299793543): + No description available. + GENERAL_PURPOSE_E2 (301911877): + No description available. + GENERAL_PURPOSE_N2 (301912156): + No description available. + GENERAL_PURPOSE_N2D (232471400): + No description available. + GENERAL_PURPOSE_T2D (232477166): + No description available. + GRAPHICS_OPTIMIZED (68500563): + No description available. + MEMORY_OPTIMIZED (281753417): + No description available. + MEMORY_OPTIMIZED_M3 (276301372): + No description available. + TYPE_UNSPECIFIED (437714322): + No description available. 
+ """ + UNDEFINED_TYPE = 0 + ACCELERATOR_OPTIMIZED = 280848403 + COMPUTE_OPTIMIZED = 158349023 + COMPUTE_OPTIMIZED_C2D = 383246453 + COMPUTE_OPTIMIZED_C3 = 428004784 + GENERAL_PURPOSE = 299793543 + GENERAL_PURPOSE_E2 = 301911877 + GENERAL_PURPOSE_N2 = 301912156 + GENERAL_PURPOSE_N2D = 232471400 + GENERAL_PURPOSE_T2D = 232477166 + GRAPHICS_OPTIMIZED = 68500563 + MEMORY_OPTIMIZED = 281753417 + MEMORY_OPTIMIZED_M3 = 276301372 + TYPE_UNSPECIFIED = 437714322 + + auto_renew: bool = proto.Field( + proto.BOOL, + number=495520765, + optional=True, + ) + category: str = proto.Field( + proto.STRING, + number=50511102, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + end_timestamp: str = proto.Field( + proto.STRING, + number=468096690, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + license_resource: 'LicenseResourceCommitment' = proto.Field( + proto.MESSAGE, + number=437955148, + optional=True, + message='LicenseResourceCommitment', + ) + merge_source_commitments: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=188093761, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + plan: str = proto.Field( + proto.STRING, + number=3443497, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + reservations: MutableSequence['Reservation'] = proto.RepeatedField( + proto.MESSAGE, + number=399717927, + message='Reservation', + ) + resources: MutableSequence['ResourceCommitment'] = proto.RepeatedField( + proto.MESSAGE, + number=164412965, + message='ResourceCommitment', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + split_source_commitment: 
str = proto.Field( + proto.STRING, + number=402611156, + optional=True, + ) + start_timestamp: str = proto.Field( + proto.STRING, + number=83645817, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + status_message: str = proto.Field( + proto.STRING, + number=297428154, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class CommitmentAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.CommitmentsScopedList]): + A list of CommitmentsScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#commitmentAggregatedList for aggregated lists of + commitments. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'CommitmentsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='CommitmentsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class CommitmentList(proto.Message): + r"""Contains a list of Commitment resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Commitment]): + A list of Commitment resources. + kind (str): + [Output Only] Type of resource. Always + compute#commitmentList for lists of commitments. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Commitment'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Commitment', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class CommitmentsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + commitments (MutableSequence[google.cloud.compute_v1.types.Commitment]): + [Output Only] A list of commitments contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of commitments when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + commitments: MutableSequence['Commitment'] = proto.RepeatedField( + proto.MESSAGE, + number=450664446, + message='Commitment', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class Condition(proto.Message): + r"""This is deprecated and has no effect. Do not use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + iam (str): + This is deprecated and has no effect. Do not + use. Check the Iam enum for the list of possible + values. + + This field is a member of `oneof`_ ``_iam``. 
+ op (str): + This is deprecated and has no effect. Do not + use. Check the Op enum for the list of possible + values. + + This field is a member of `oneof`_ ``_op``. + svc (str): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_svc``. + sys (str): + This is deprecated and has no effect. Do not + use. Check the Sys enum for the list of possible + values. + + This field is a member of `oneof`_ ``_sys``. + values (MutableSequence[str]): + This is deprecated and has no effect. Do not + use. + """ + class Iam(proto.Enum): + r"""This is deprecated and has no effect. Do not use. Additional + supported values which may be not listed in the enum directly due to + technical reasons: NO_ATTR + + Values: + UNDEFINED_IAM (0): + A value indicating that the enum field is not + set. + APPROVER (357258949): + This is deprecated and has no effect. Do not + use. + ATTRIBUTION (232465503): + This is deprecated and has no effect. Do not + use. + AUTHORITY (504108835): + This is deprecated and has no effect. Do not + use. + CREDENTIALS_TYPE (348222141): + This is deprecated and has no effect. Do not + use. + CREDS_ASSERTION (332343272): + This is deprecated and has no effect. Do not + use. + JUSTIFICATION_TYPE (206147719): + This is deprecated and has no effect. Do not + use. + SECURITY_REALM (526269616): + This is deprecated and has no effect. Do not + use. + """ + UNDEFINED_IAM = 0 + APPROVER = 357258949 + ATTRIBUTION = 232465503 + AUTHORITY = 504108835 + CREDENTIALS_TYPE = 348222141 + CREDS_ASSERTION = 332343272 + JUSTIFICATION_TYPE = 206147719 + SECURITY_REALM = 526269616 + + class Op(proto.Enum): + r"""This is deprecated and has no effect. Do not use. + + Values: + UNDEFINED_OP (0): + A value indicating that the enum field is not + set. + DISCHARGED (266338274): + This is deprecated and has no effect. Do not + use. + EQUALS (442201023): + This is deprecated and has no effect. Do not + use. 
+ IN (2341): + This is deprecated and has no effect. Do not + use. + NOT_EQUALS (19718859): + This is deprecated and has no effect. Do not + use. + NOT_IN (161144369): + This is deprecated and has no effect. Do not + use. + NO_OP (74481951): + This is deprecated and has no effect. Do not + use. + """ + UNDEFINED_OP = 0 + DISCHARGED = 266338274 + EQUALS = 442201023 + IN = 2341 + NOT_EQUALS = 19718859 + NOT_IN = 161144369 + NO_OP = 74481951 + + class Sys(proto.Enum): + r"""This is deprecated and has no effect. Do not use. Additional + supported values which may be not listed in the enum directly due to + technical reasons: NO_ATTR + + Values: + UNDEFINED_SYS (0): + A value indicating that the enum field is not + set. + IP (2343): + This is deprecated and has no effect. Do not + use. + NAME (2388619): + This is deprecated and has no effect. Do not + use. + REGION (266017524): + This is deprecated and has no effect. Do not + use. + SERVICE (17781397): + This is deprecated and has no effect. Do not + use. + """ + UNDEFINED_SYS = 0 + IP = 2343 + NAME = 2388619 + REGION = 266017524 + SERVICE = 17781397 + + iam: str = proto.Field( + proto.STRING, + number=104021, + optional=True, + ) + op: str = proto.Field( + proto.STRING, + number=3553, + optional=True, + ) + svc: str = proto.Field( + proto.STRING, + number=114272, + optional=True, + ) + sys: str = proto.Field( + proto.STRING, + number=114381, + optional=True, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=249928994, + ) + + +class ConfidentialInstanceConfig(proto.Message): + r"""A set of Confidential Instance options. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_confidential_compute (bool): + Defines whether the instance should have + confidential compute enabled. + + This field is a member of `oneof`_ ``_enable_confidential_compute``. 
+ """ + + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=102135228, + optional=True, + ) + + +class ConnectionDraining(proto.Message): + r"""Message containing connection draining configuration. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + draining_timeout_sec (int): + Configures a duration timeout for existing + requests on a removed backend instance. For + supported load balancers and protocols, as + described in Enabling connection draining. + + This field is a member of `oneof`_ ``_draining_timeout_sec``. + """ + + draining_timeout_sec: int = proto.Field( + proto.INT32, + number=225127070, + optional=True, + ) + + +class ConsistentHashLoadBalancerSettings(proto.Message): + r"""This message defines settings for a consistent hash style + load balancer. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + http_cookie (google.cloud.compute_v1.types.ConsistentHashLoadBalancerSettingsHttpCookie): + Hash is based on HTTP Cookie. This field describes a HTTP + cookie that will be used as the hash key for the consistent + hash load balancer. If the cookie is not present, it will be + generated. This field is applicable if the sessionAffinity + is set to HTTP_COOKIE. Not supported when the backend + service is referenced by a URL map that is bound to target + gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_http_cookie``. + http_header_name (str): + The hash based on the value of the specified header field. + This field is applicable if the sessionAffinity is set to + HEADER_FIELD. + + This field is a member of `oneof`_ ``_http_header_name``. + minimum_ring_size (int): + The minimum number of virtual nodes to use + for the hash ring. Defaults to 1024. Larger ring + sizes result in more granular load + distributions. 
If the number of hosts in the + load balancing pool is larger than the ring + size, each host will be assigned a single + virtual node. + + This field is a member of `oneof`_ ``_minimum_ring_size``. + """ + + http_cookie: 'ConsistentHashLoadBalancerSettingsHttpCookie' = proto.Field( + proto.MESSAGE, + number=6673915, + optional=True, + message='ConsistentHashLoadBalancerSettingsHttpCookie', + ) + http_header_name: str = proto.Field( + proto.STRING, + number=234798022, + optional=True, + ) + minimum_ring_size: int = proto.Field( + proto.INT64, + number=234380735, + optional=True, + ) + + +class ConsistentHashLoadBalancerSettingsHttpCookie(proto.Message): + r"""The information about the HTTP Cookie on which the hash + function is based for load balancing policies that use a + consistent hash. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the cookie. + + This field is a member of `oneof`_ ``_name``. + path (str): + Path to set for the cookie. + + This field is a member of `oneof`_ ``_path``. + ttl (google.cloud.compute_v1.types.Duration): + Lifetime of the cookie. + + This field is a member of `oneof`_ ``_ttl``. + """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + path: str = proto.Field( + proto.STRING, + number=3433509, + optional=True, + ) + ttl: 'Duration' = proto.Field( + proto.MESSAGE, + number=115180, + optional=True, + message='Duration', + ) + + +class CorsPolicy(proto.Message): + r"""The specification for allowing client-side cross-origin + requests. For more information about the W3C recommendation for + cross-origin resource sharing (CORS), see Fetch API Living + Standard. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + allow_credentials (bool): + In response to a preflight request, setting + this to true indicates that the actual request + can include user credentials. This field + translates to the + Access-Control-Allow-Credentials header. Default + is false. + + This field is a member of `oneof`_ ``_allow_credentials``. + allow_headers (MutableSequence[str]): + Specifies the content for the + Access-Control-Allow-Headers header. + allow_methods (MutableSequence[str]): + Specifies the content for the + Access-Control-Allow-Methods header. + allow_origin_regexes (MutableSequence[str]): + Specifies a regular expression that matches allowed origins. + For more information about the regular expression syntax, + see Syntax. An origin is allowed if it matches either an + item in allowOrigins or an item in allowOriginRegexes. + Regular expressions can only be used when the + loadBalancingScheme is set to INTERNAL_SELF_MANAGED. + allow_origins (MutableSequence[str]): + Specifies the list of origins that is allowed + to do CORS requests. An origin is allowed if it + matches either an item in allowOrigins or an + item in allowOriginRegexes. + disabled (bool): + If true, the setting specifies the CORS + policy is disabled. The default value of false, + which indicates that the CORS policy is in + effect. + + This field is a member of `oneof`_ ``_disabled``. + expose_headers (MutableSequence[str]): + Specifies the content for the + Access-Control-Expose-Headers header. + max_age (int): + Specifies how long results of a preflight + request can be cached in seconds. This field + translates to the Access-Control-Max-Age header. + + This field is a member of `oneof`_ ``_max_age``. 
+ """ + + allow_credentials: bool = proto.Field( + proto.BOOL, + number=481263366, + optional=True, + ) + allow_headers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=45179024, + ) + allow_methods: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=205405372, + ) + allow_origin_regexes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=215385810, + ) + allow_origins: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=194914071, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=270940796, + optional=True, + ) + expose_headers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=247604747, + ) + max_age: int = proto.Field( + proto.INT32, + number=307559332, + optional=True, + ) + + +class CreateInstancesInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.CreateInstances. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + instance_group_managers_create_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersCreateInstancesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
The request ID must be a valid UUID + with the exception that zero UUID is not + supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_create_instances_request_resource: 'InstanceGroupManagersCreateInstancesRequest' = proto.Field( + proto.MESSAGE, + number=24558867, + message='InstanceGroupManagersCreateInstancesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class CreateInstancesRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.CreateInstances. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + project (str): + Project ID for this request. + region (str): + The name of the region where the managed + instance group is located. It should conform to + RFC1035. + region_instance_group_managers_create_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersCreateInstancesRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. The request ID must be a valid UUID + with the exception that zero UUID is not + supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_managers_create_instances_request_resource: 'RegionInstanceGroupManagersCreateInstancesRequest' = proto.Field( + proto.MESSAGE, + number=359014280, + message='RegionInstanceGroupManagersCreateInstancesRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class CreateSnapshotDiskRequest(proto.Message): + r"""A request message for Disks.CreateSnapshot. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + Name of the persistent disk to snapshot. + guest_flush (bool): + [Input Only] Whether to attempt an application consistent + snapshot by informing the OS to prepare for the snapshot + process. + + This field is a member of `oneof`_ ``_guest_flush``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + snapshot_resource (google.cloud.compute_v1.types.Snapshot): + The body resource for this request + zone (str): + The name of the zone for this request. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + guest_flush: bool = proto.Field( + proto.BOOL, + number=385550813, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + snapshot_resource: 'Snapshot' = proto.Field( + proto.MESSAGE, + number=481319977, + message='Snapshot', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class CreateSnapshotRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.CreateSnapshot. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + Name of the regional persistent disk to + snapshot. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + snapshot_resource (google.cloud.compute_v1.types.Snapshot): + The body resource for this request + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + snapshot_resource: 'Snapshot' = proto.Field( + proto.MESSAGE, + number=481319977, + message='Snapshot', + ) + + +class CustomerEncryptionKey(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + The name of the encryption key that is stored in Google + Cloud KMS. For example: "kmsKeyName": + "projects/kms_project_id/locations/region/keyRings/ + key_region/cryptoKeys/key The fully-qualifed key name may be + returned for resource GET requests. For example: + "kmsKeyName": + "projects/kms_project_id/locations/region/keyRings/ + key_region/cryptoKeys/key /cryptoKeyVersions/1 + + This field is a member of `oneof`_ ``_kms_key_name``. + kms_key_service_account (str): + The service account being used for the encryption request + for the given KMS key. If absent, the Compute Engine default + service account is used. For example: + "kmsKeyServiceAccount": + "name@project_id.iam.gserviceaccount.com/ + + This field is a member of `oneof`_ ``_kms_key_service_account``. 
+ raw_key (str): + Specifies a 256-bit customer-supplied + encryption key, encoded in RFC 4648 base64 to + either encrypt or decrypt this resource. You can + provide either the rawKey or the + rsaEncryptedKey. For example: "rawKey": + "SGVsbG8gZnJvbSBHb29nbGUgQ2xvdWQgUGxhdGZvcm0=". + + This field is a member of `oneof`_ ``_raw_key``. + rsa_encrypted_key (str): + Specifies an RFC 4648 base64 encoded, + RSA-wrapped 2048-bit customer-supplied + encryption key to either encrypt or decrypt this + resource. You can provide either the rawKey or + the rsaEncryptedKey. For example: + "rsaEncryptedKey": + "ieCx/NcW06PcT7Ep1X6LUTc/hLvUDYyzSZPPVCVPTVEohpeHASqC8uw5TzyO9U+Fka9JFH + z0mBibXUInrC/jEk014kCK/NPjYgEMOyssZ4ZINPKxlUh2zn1bV+MCaTICrdmuSBTWlUUiFoD + D6PYznLwh8ZNdaheCeZ8ewEXgFQ8V+sDroLaN3Xs3MDTXQEMMoNUXMCZEIpg9Vtp9x2oe==" + The key must meet the following requirements + before you can provide it to Compute Engine: 1. + The key is wrapped using a RSA public key + certificate provided by Google. 2. After being + wrapped, the key must be encoded in RFC 4648 + base64 encoding. Gets the RSA public key + certificate provided by Google at: + https://cloud-certs.storage.googleapis.com/google-cloud-csek-ingress.pem + + This field is a member of `oneof`_ ``_rsa_encrypted_key``. + sha256 (str): + [Output only] The RFC 4648 base64 encoded SHA-256 hash of + the customer-supplied encryption key that protects this + resource. + + This field is a member of `oneof`_ ``_sha256``. 
+ """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + kms_key_service_account: str = proto.Field( + proto.STRING, + number=209986261, + optional=True, + ) + raw_key: str = proto.Field( + proto.STRING, + number=449196488, + optional=True, + ) + rsa_encrypted_key: str = proto.Field( + proto.STRING, + number=335487397, + optional=True, + ) + sha256: str = proto.Field( + proto.STRING, + number=170112551, + optional=True, + ) + + +class CustomerEncryptionKeyProtectedDisk(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + Decrypts data associated with the disk with a + customer-supplied encryption key. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + source (str): + Specifies a valid partial or full URL to an existing + Persistent Disk resource. This field is only applicable for + persistent disks. For example: "source": + "/compute/v1/projects/project_id/zones/zone/disks/ disk_name + + This field is a member of `oneof`_ ``_source``. + """ + + disk_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=271660677, + optional=True, + message='CustomerEncryptionKey', + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + + +class Data(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + [Output Only] A key that provides more detail on the warning + being returned. For example, for warnings where there are no + results in a list request for a particular zone, this key + might be scope and the key value might be the zone name. 
+ Other examples might be a key indicating a deprecated + resource and a suggested replacement, or a warning about + invalid network settings (for example, if an instance + attempts to perform IP forwarding but is not enabled for IP + forwarding). + + This field is a member of `oneof`_ ``_key``. + value (str): + [Output Only] A warning data value corresponding to the key. + + This field is a member of `oneof`_ ``_value``. + """ + + key: str = proto.Field( + proto.STRING, + number=106079, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=111972721, + optional=True, + ) + + +class DeleteAccessConfigInstanceRequest(proto.Message): + r"""A request message for Instances.DeleteAccessConfig. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + access_config (str): + The name of the access config to delete. + instance (str): + The instance name for this request. + network_interface (str): + The name of the network interface. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + access_config: str = proto.Field( + proto.STRING, + number=72856189, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + network_interface: str = proto.Field( + proto.STRING, + number=365387880, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteAddressRequest(proto.Message): + r"""A request message for Addresses.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address (str): + Name of the address resource to delete. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + address: str = proto.Field( + proto.STRING, + number=462920692, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteAutoscalerRequest(proto.Message): + r"""A request message for Autoscalers.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler (str): + Name of the autoscaler to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + Name of the zone for this request. + """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.Delete. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket (str): + Name of the BackendBucket resource to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to + delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteDiskRequest(proto.Message): + r"""A request message for Disks.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + Name of the persistent disk to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteExternalVpnGatewayRequest(proto.Message): + r"""A request message for ExternalVpnGateways.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + external_vpn_gateway (str): + Name of the externalVpnGateways to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + external_vpn_gateway: str = proto.Field( + proto.STRING, + number=109898629, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.Delete. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to delete. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteFirewallRequest(proto.Message): + r"""A request message for Firewalls.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall (str): + Name of the firewall rule to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall: str = proto.Field( + proto.STRING, + number=511016192, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteForwardingRuleRequest(proto.Message): + r"""A request message for ForwardingRules.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule (str): + Name of the ForwardingRule resource to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteGlobalAddressRequest(proto.Message): + r"""A request message for GlobalAddresses.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address (str): + Name of the address resource to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + address: str = proto.Field( + proto.STRING, + number=462920692, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteGlobalForwardingRuleRequest(proto.Message): + r"""A request message for GlobalForwardingRules.Delete. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule (str): + Name of the ForwardingRule resource to + delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteGlobalNetworkEndpointGroupRequest(proto.Message): + r"""A request message for GlobalNetworkEndpointGroups.Delete. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group to + delete. It should comply with RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteGlobalOperationRequest(proto.Message): + r"""A request message for GlobalOperations.Delete. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to delete. + project (str): + Project ID for this request. + """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class DeleteGlobalOperationResponse(proto.Message): + r"""A response message for GlobalOperations.Delete. See the + method description for details. + + """ + + +class DeleteGlobalOrganizationOperationRequest(proto.Message): + r"""A request message for GlobalOrganizationOperations.Delete. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + operation (str): + Name of the Operations resource to delete. + parent_id (str): + Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. 
+ """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + parent_id: str = proto.Field( + proto.STRING, + number=459714768, + optional=True, + ) + + +class DeleteGlobalOrganizationOperationResponse(proto.Message): + r"""A response message for GlobalOrganizationOperations.Delete. + See the method description for details. + + """ + + +class DeleteGlobalPublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for GlobalPublicDelegatedPrefixes.Delete. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix resource to + delete. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix: str = proto.Field( + proto.STRING, + number=204238440, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteHealthCheckRequest(proto.Message): + r"""A request message for HealthChecks.Delete. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + Name of the HealthCheck resource to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteImageRequest(proto.Message): + r"""A request message for Images.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + image (str): + Name of the image resource to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + image: str = proto.Field( + proto.STRING, + number=100313435, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group to + delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteInstanceGroupRequest(proto.Message): + r"""A request message for InstanceGroups.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group (str): + The name of the instance group to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the instance group + is located. + """ + + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""A request message for Instances.Delete. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance resource to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteInstanceTemplateRequest(proto.Message): + r"""A request message for InstanceTemplates.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_template (str): + The name of the instance template to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteInstancesInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.DeleteInstances. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + instance_group_managers_delete_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersDeleteInstancesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_delete_instances_request_resource: 'InstanceGroupManagersDeleteInstancesRequest' = proto.Field( + proto.MESSAGE, + number=166421252, + message='InstanceGroupManagersDeleteInstancesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteInstancesRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.DeleteInstances. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + Name of the managed instance group. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_group_managers_delete_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersDeleteInstancesRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_managers_delete_instances_request_resource: 'RegionInstanceGroupManagersDeleteInstancesRequest' = proto.Field( + proto.MESSAGE, + number=500876665, + message='RegionInstanceGroupManagersDeleteInstancesRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteInterconnectAttachmentRequest(proto.Message): + r"""A request message for InterconnectAttachments.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_attachment (str): + Name of the interconnect attachment to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + interconnect_attachment: str = proto.Field( + proto.STRING, + number=308135284, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteInterconnectRequest(proto.Message): + r"""A request message for Interconnects.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect (str): + Name of the interconnect to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + interconnect: str = proto.Field( + proto.STRING, + number=224601230, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteLicenseRequest(proto.Message): + r"""A request message for Licenses.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + license_ (str): + Name of the license resource to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + license_: str = proto.Field( + proto.STRING, + number=166757441, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteMachineImageRequest(proto.Message): + r"""A request message for MachineImages.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + machine_image (str): + The name of the machine image to delete. 
+ project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + machine_image: str = proto.Field( + proto.STRING, + number=69189475, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_attachment (str): + Name of the NetworkAttachment resource to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + request_id (str): + An optional request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. For example, consider a situation where you + make an initial request and the request times out. 
If you + make the request again with the same request ID, the server + can check if original operation with the same request ID was + received, and if so, will ignore the second request. This + prevents clients from accidentally creating duplicate + commitments. The request ID must be a valid UUID with the + exception that zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + network_attachment: str = proto.Field( + proto.STRING, + number=224644052, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteNetworkEdgeSecurityServiceRequest(proto.Message): + r"""A request message for NetworkEdgeSecurityServices.Delete. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_edge_security_service (str): + Name of the network edge security service to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + network_edge_security_service: str = proto.Field( + proto.STRING, + number=157011879, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteNetworkEndpointGroupRequest(proto.Message): + r"""A request message for NetworkEndpointGroups.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group to + delete. It should comply with RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the network + endpoint group is located. It should comply with + RFC1035. 
+ """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteNetworkRequest(proto.Message): + r"""A request message for Networks.Delete. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Name of the network to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_group (str): + Name of the NodeGroup resource to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteNodeTemplateRequest(proto.Message): + r"""A request message for NodeTemplates.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_template (str): + Name of the NodeTemplate resource to delete. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. + """ + + node_template: str = proto.Field( + proto.STRING, + number=323154455, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteNodesNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.DeleteNodes. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_group (str): + Name of the NodeGroup resource whose nodes + will be deleted. + node_groups_delete_nodes_request_resource (google.cloud.compute_v1.types.NodeGroupsDeleteNodesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + node_groups_delete_nodes_request_resource: 'NodeGroupsDeleteNodesRequest' = proto.Field( + proto.MESSAGE, + number=183298962, + message='NodeGroupsDeleteNodesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeletePacketMirroringRequest(proto.Message): + r"""A request message for PacketMirrorings.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + packet_mirroring (str): + Name of the PacketMirroring resource to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + packet_mirroring: str = proto.Field( + proto.STRING, + number=22305996, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeletePerInstanceConfigsInstanceGroupManagerRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.DeletePerInstanceConfigs. See the method + description for details. + + Attributes: + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + instance_group_managers_delete_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersDeletePerInstanceConfigsReq): + The body resource for this request + project (str): + Project ID for this request. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_delete_per_instance_configs_req_resource: 'InstanceGroupManagersDeletePerInstanceConfigsReq' = proto.Field( + proto.MESSAGE, + number=362427680, + message='InstanceGroupManagersDeletePerInstanceConfigsReq', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeletePerInstanceConfigsRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.DeletePerInstanceConfigs. See the + method description for details. + + Attributes: + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request, + should conform to RFC1035. 
+ region_instance_group_manager_delete_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerDeleteInstanceConfigReq): + The body resource for this request + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_manager_delete_instance_config_req_resource: 'RegionInstanceGroupManagerDeleteInstanceConfigReq' = proto.Field( + proto.MESSAGE, + number=740741, + message='RegionInstanceGroupManagerDeleteInstanceConfigReq', + ) + + +class DeletePublicAdvertisedPrefixeRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.Delete. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_advertised_prefix (str): + Name of the PublicAdvertisedPrefix resource + to delete. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_advertised_prefix: str = proto.Field( + proto.STRING, + number=101874590, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeletePublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix resource to + delete. + region (str): + Name of the region of this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix: str = proto.Field( + proto.STRING, + number=204238440, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionAutoscalerRequest(proto.Message): + r"""A request message for RegionAutoscalers.Delete. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler (str): + Name of the autoscaler to delete. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + Name of the regional persistent disk to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionHealthCheckRequest(proto.Message): + r"""A request message for RegionHealthChecks.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + Name of the HealthCheck resource to delete. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionHealthCheckServiceRequest(proto.Message): + r"""A request message for RegionHealthCheckServices.Delete. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check_service (str): + Name of the HealthCheckService to delete. The + name must be 1-63 characters long, and comply + with RFC1035. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + health_check_service: str = proto.Field( + proto.STRING, + number=408374747, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.Delete. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + Name of the managed instance group to delete. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionInstanceTemplateRequest(proto.Message): + r"""A request message for RegionInstanceTemplates.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_template (str): + The name of the instance template to delete. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionNetworkEndpointGroupRequest(proto.Message): + r"""A request message for RegionNetworkEndpointGroups.Delete. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group to + delete. It should comply with RFC1035. + project (str): + Project ID for this request. + region (str): + The name of the region where the network + endpoint group is located. It should comply with + RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.Delete. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to delete. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionNotificationEndpointRequest(proto.Message): + r"""A request message for RegionNotificationEndpoints.Delete. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + notification_endpoint (str): + Name of the NotificationEndpoint resource to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + notification_endpoint: str = proto.Field( + proto.STRING, + number=376807017, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteRegionOperationRequest(proto.Message): + r"""A request message for RegionOperations.Delete. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to delete. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class DeleteRegionOperationResponse(proto.Message): + r"""A response message for RegionOperations.Delete. See the + method description for details. + + """ + + +class DeleteRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + security_policy (str): + Name of the security policy to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + + +class DeleteRegionSslCertificateRequest(proto.Message): + r"""A request message for RegionSslCertificates.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_certificate (str): + Name of the SslCertificate resource to + delete. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_certificate: str = proto.Field( + proto.STRING, + number=46443492, + ) + + +class DeleteRegionSslPolicyRequest(proto.Message): + r"""A request message for RegionSslPolicies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_policy (str): + Name of the SSL policy to delete. The name + must be 1-63 characters long, and comply with + RFC1035. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + ) + + +class DeleteRegionTargetHttpProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpProxies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_http_proxy (str): + Name of the TargetHttpProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_http_proxy: str = proto.Field( + proto.STRING, + number=206872421, + ) + + +class DeleteRegionTargetHttpsProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpsProxies.Delete. 
See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class DeleteRegionTargetTcpProxyRequest(proto.Message): + r"""A request message for RegionTargetTcpProxies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_tcp_proxy: str = proto.Field( + proto.STRING, + number=503065442, + ) + + +class DeleteRegionUrlMapRequest(proto.Message): + r"""A request message for RegionUrlMaps.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + begin_interface: MixerMutationRequestBuilder Request ID to + support idempotency. + + This field is a member of `oneof`_ ``_request_id``. + url_map (str): + Name of the UrlMap resource to delete. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + + +class DeleteReservationRequest(proto.Message): + r"""A request message for Reservations.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + reservation (str): + Name of the reservation to delete. + zone (str): + Name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + reservation: str = proto.Field( + proto.STRING, + number=47530956, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.Delete. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource_policy (str): + Name of the resource policy to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource_policy: str = proto.Field( + proto.STRING, + number=159240835, + ) + + +class DeleteRouteRequest(proto.Message): + r"""A request message for Routes.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + route (str): + Name of the Route resource to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + route: str = proto.Field( + proto.STRING, + number=108704329, + ) + + +class DeleteRouterRequest(proto.Message): + r"""A request message for Routers.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ router (str): + Name of the Router resource to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) + + +class DeleteSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + security_policy (str): + Name of the security policy to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + + +class DeleteServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.Delete. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + service_attachment (str): + Name of the ServiceAttachment resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + service_attachment: str = proto.Field( + proto.STRING, + number=338957549, + ) + + +class DeleteSignedUrlKeyBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.DeleteSignedUrlKey. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket (str): + Name of the BackendBucket resource to which + the Signed URL Key should be added. The name + should conform to RFC1035. + key_name (str): + The name of the Signed URL Key to delete. + project (str): + Project ID for this request. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + key_name: str = proto.Field( + proto.STRING, + number=500938859, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteSignedUrlKeyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.DeleteSignedUrlKey. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to which + the Signed URL Key should be added. The name + should conform to RFC1035. + key_name (str): + The name of the Signed URL Key to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + key_name: str = proto.Field( + proto.STRING, + number=500938859, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteSnapshotRequest(proto.Message): + r"""A request message for Snapshots.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + snapshot (str): + Name of the Snapshot resource to delete. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + snapshot: str = proto.Field( + proto.STRING, + number=284874180, + ) + + +class DeleteSslCertificateRequest(proto.Message): + r"""A request message for SslCertificates.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_certificate (str): + Name of the SslCertificate resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_certificate: str = proto.Field( + proto.STRING, + number=46443492, + ) + + +class DeleteSslPolicyRequest(proto.Message): + r"""A request message for SslPolicies.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_policy (str): + Name of the SSL policy to delete. The name + must be 1-63 characters long, and comply with + RFC1035. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + ) + + +class DeleteSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + subnetwork (str): + Name of the Subnetwork resource to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + ) + + +class DeleteTargetGrpcProxyRequest(proto.Message): + r"""A request message for TargetGrpcProxies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_grpc_proxy (str): + Name of the TargetGrpcProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_grpc_proxy: str = proto.Field( + proto.STRING, + number=5020283, + ) + + +class DeleteTargetHttpProxyRequest(proto.Message): + r"""A request message for TargetHttpProxies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_http_proxy (str): + Name of the TargetHttpProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_http_proxy: str = proto.Field( + proto.STRING, + number=206872421, + ) + + +class DeleteTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.Delete. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class DeleteTargetInstanceRequest(proto.Message): + r"""A request message for TargetInstances.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_instance (str): + Name of the TargetInstance resource to + delete. + zone (str): + Name of the zone scoping this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_instance: str = proto.Field( + proto.STRING, + number=289769347, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. + target_pool (str): + Name of the TargetPool resource to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + + +class DeleteTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_ssl_proxy (str): + Name of the TargetSslProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_ssl_proxy: str = proto.Field( + proto.STRING, + number=338795853, + ) + + +class DeleteTargetTcpProxyRequest(proto.Message): + r"""A request message for TargetTcpProxies.Delete. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_tcp_proxy: str = proto.Field( + proto.STRING, + number=503065442, + ) + + +class DeleteTargetVpnGatewayRequest(proto.Message): + r"""A request message for TargetVpnGateways.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_vpn_gateway (str): + Name of the target VPN gateway to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_vpn_gateway: str = proto.Field( + proto.STRING, + number=532512843, + ) + + +class DeleteUrlMapRequest(proto.Message): + r"""A request message for UrlMaps.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. + url_map (str): + Name of the UrlMap resource to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + + +class DeleteVpnGatewayRequest(proto.Message): + r"""A request message for VpnGateways.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + vpn_gateway (str): + Name of the VPN gateway to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + vpn_gateway: str = proto.Field( + proto.STRING, + number=406684153, + ) + + +class DeleteVpnTunnelRequest(proto.Message): + r"""A request message for VpnTunnels.Delete. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + vpn_tunnel (str): + Name of the VpnTunnel resource to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + vpn_tunnel: str = proto.Field( + proto.STRING, + number=143821331, + ) + + +class DeleteZoneOperationRequest(proto.Message): + r"""A request message for ZoneOperations.Delete. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to delete. + project (str): + Project ID for this request. + zone (str): + Name of the zone for this request. 
+ """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteZoneOperationResponse(proto.Message): + r"""A response message for ZoneOperations.Delete. See the method + description for details. + + """ + + +class Denied(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + I_p_protocol (str): + The IP protocol to which this rule applies. + The protocol type is required when creating a + firewall rule. This value can either be one of + the following well known protocol strings (tcp, + udp, icmp, esp, ah, ipip, sctp) or the IP + protocol number. + + This field is a member of `oneof`_ ``_I_p_protocol``. + ports (MutableSequence[str]): + An optional list of ports to which this rule applies. This + field is only applicable for the UDP or TCP protocol. Each + entry must be either an integer or a range. If not + specified, this rule applies to connections through any + port. Example inputs include: ["22"], ["80","443"], and + ["12345-12349"]. + """ + + I_p_protocol: str = proto.Field( + proto.STRING, + number=488094525, + optional=True, + ) + ports: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=106854418, + ) + + +class DeprecateImageRequest(proto.Message): + r"""A request message for Images.Deprecate. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + deprecation_status_resource (google.cloud.compute_v1.types.DeprecationStatus): + The body resource for this request + image (str): + Image name. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + deprecation_status_resource: 'DeprecationStatus' = proto.Field( + proto.MESSAGE, + number=333006064, + message='DeprecationStatus', + ) + image: str = proto.Field( + proto.STRING, + number=100313435, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeprecationStatus(proto.Message): + r"""Deprecation status for a public resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + deleted (str): + An optional RFC3339 timestamp on or after + which the state of this resource is intended to + change to DELETED. This is only informational + and the status will not change unless the client + explicitly changes it. + + This field is a member of `oneof`_ ``_deleted``. + deprecated (str): + An optional RFC3339 timestamp on or after + which the state of this resource is intended to + change to DEPRECATED. This is only informational + and the status will not change unless the client + explicitly changes it. + + This field is a member of `oneof`_ ``_deprecated``. 
+ obsolete (str): + An optional RFC3339 timestamp on or after + which the state of this resource is intended to + change to OBSOLETE. This is only informational + and the status will not change unless the client + explicitly changes it. + + This field is a member of `oneof`_ ``_obsolete``. + replacement (str): + The URL of the suggested replacement for a + deprecated resource. The suggested replacement + resource must be the same kind of resource as + the deprecated resource. + + This field is a member of `oneof`_ ``_replacement``. + state (str): + The deprecation state of this resource. This + can be ACTIVE, DEPRECATED, OBSOLETE, or DELETED. + Operations which communicate the end of life + date for an image, can use ACTIVE. Operations + which create a new resource using a DEPRECATED + resource will return successfully, but with a + warning indicating the deprecated resource and + recommending its replacement. Operations which + use OBSOLETE or DELETED resources will be + rejected and result in an error. Check the State + enum for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + """ + class State(proto.Enum): + r"""The deprecation state of this resource. This can be ACTIVE, + DEPRECATED, OBSOLETE, or DELETED. Operations which communicate + the end of life date for an image, can use ACTIVE. Operations + which create a new resource using a DEPRECATED resource will + return successfully, but with a warning indicating the + deprecated resource and recommending its replacement. Operations + which use OBSOLETE or DELETED resources will be rejected and + result in an error. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + No description available. + DELETED (120962041): + No description available. + DEPRECATED (463360435): + No description available. + OBSOLETE (66532761): + No description available. 
+ """ + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + DELETED = 120962041 + DEPRECATED = 463360435 + OBSOLETE = 66532761 + + deleted: str = proto.Field( + proto.STRING, + number=476721177, + optional=True, + ) + deprecated: str = proto.Field( + proto.STRING, + number=515138995, + optional=True, + ) + obsolete: str = proto.Field( + proto.STRING, + number=357647769, + optional=True, + ) + replacement: str = proto.Field( + proto.STRING, + number=430919186, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class DetachDiskInstanceRequest(proto.Message): + r"""A request message for Instances.DetachDisk. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + device_name (str): + The device name of the disk to detach. Make a + get() request on the instance to view currently + attached disks and device names. + instance (str): + Instance name for this request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + device_name: str = proto.Field( + proto.STRING, + number=67541716, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest(proto.Message): + r"""A request message for + GlobalNetworkEndpointGroups.DetachNetworkEndpoints. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + global_network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.GlobalNetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + network_endpoint_group (str): + The name of the network endpoint group where + you are removing network endpoints. It should + comply with RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + global_network_endpoint_groups_detach_endpoints_request_resource: 'GlobalNetworkEndpointGroupsDetachEndpointsRequest' = proto.Field( + proto.MESSAGE, + number=8898269, + message='GlobalNetworkEndpointGroupsDetachEndpointsRequest', + ) + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DetachNetworkEndpointsNetworkEndpointGroupRequest(proto.Message): + r"""A request message for + NetworkEndpointGroups.DetachNetworkEndpoints. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group where + you are removing network endpoints. It should + comply with RFC1035. + network_endpoint_groups_detach_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsDetachEndpointsRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ zone (str): + The name of the zone where the network + endpoint group is located. It should comply with + RFC1035. + """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + network_endpoint_groups_detach_endpoints_request_resource: 'NetworkEndpointGroupsDetachEndpointsRequest' = proto.Field( + proto.MESSAGE, + number=515608697, + message='NetworkEndpointGroupsDetachEndpointsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DisableXpnHostProjectRequest(proto.Message): + r"""A request message for Projects.DisableXpnHost. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DisableXpnResourceProjectRequest(proto.Message): + r"""A request message for Projects.DisableXpnResource. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + projects_disable_xpn_resource_request_resource (google.cloud.compute_v1.types.ProjectsDisableXpnResourceRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + projects_disable_xpn_resource_request_resource: 'ProjectsDisableXpnResourceRequest' = proto.Field( + proto.MESSAGE, + number=209136170, + message='ProjectsDisableXpnResourceRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class Disk(proto.Message): + r"""Represents a Persistent Disk resource. 
Google Compute Engine has two + Disk resources: \* `Zonal `__ + \* `Regional `__ + Persistent disks are required for running your VM instances. Create + both boot and non-boot (data) persistent disks. For more + information, read Persistent Disks. For more storage options, read + Storage options. The disks resource represents a zonal persistent + disk. For more information, read Zonal persistent disks. The + regionDisks resource represents a regional persistent disk. For more + information, read Regional resources. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + architecture (str): + The architecture of the disk. Valid values are ARM64 or + X86_64. Check the Architecture enum for the list of possible + values. + + This field is a member of `oneof`_ ``_architecture``. + async_primary_disk (google.cloud.compute_v1.types.DiskAsyncReplication): + Disk asynchronously replicated into this + disk. + + This field is a member of `oneof`_ ``_async_primary_disk``. + async_secondary_disks (MutableMapping[str, google.cloud.compute_v1.types.DiskAsyncReplicationList]): + [Output Only] A list of disks this disk is asynchronously + replicated to. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + Encrypts the disk using a customer-supplied encryption key + or a customer-managed encryption key. Encryption keys do not + protect access to metadata of the disk. After you encrypt a + disk with a customer-supplied key, you must provide the same + key if you use the disk later. 
For example, to create a disk + snapshot, to create a disk image, to create a machine image, + or to attach the disk to a virtual machine. After you + encrypt a disk with a customer-managed key, the + diskEncryptionKey.kmsKeyName is set to a key *version* name + once the disk is created. The disk is encrypted with this + version of the key. In the response, + diskEncryptionKey.kmsKeyName appears in the following + format: "diskEncryptionKey.kmsKeyName": + "projects/kms_project_id/locations/region/keyRings/ + key_region/cryptoKeys/key /cryptoKeysVersions/version If you + do not provide an encryption key when creating the disk, + then the disk is encrypted using an automatically generated + key and you don't need to provide a key to use the disk + later. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + guest_os_features (MutableSequence[google.cloud.compute_v1.types.GuestOsFeature]): + A list of features to enable on the guest + operating system. Applicable only for bootable + images. Read Enabling guest operating system + features to see a list of available options. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always compute#disk for + disks. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this disk, which is essentially a hash of the + labels set used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash in order to update + or change labels, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make a get() request to + retrieve a disk. + + This field is a member of `oneof`_ ``_label_fingerprint``. 
+ labels (MutableMapping[str, str]): + Labels to apply to this disk. These can be + later modified by the setLabels method. + last_attach_timestamp (str): + [Output Only] Last attach timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_attach_timestamp``. + last_detach_timestamp (str): + [Output Only] Last detach timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_detach_timestamp``. + license_codes (MutableSequence[int]): + Integer license codes indicating which + licenses are attached to this disk. + licenses (MutableSequence[str]): + A list of publicly visible licenses. Reserved + for Google's use. + location_hint (str): + An opaque location hint used to place the + disk close to other resources. This field is for + use by internal tools that use the public API. + + This field is a member of `oneof`_ ``_location_hint``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + options (str): + Internal use only. + + This field is a member of `oneof`_ ``_options``. + params (google.cloud.compute_v1.types.DiskParams): + Input only. [Input Only] Additional params passed with the + request, but not persisted as part of resource payload. + + This field is a member of `oneof`_ ``_params``. + physical_block_size_bytes (int): + Physical block size of the persistent disk, + in bytes. If not present in a request, a default + value is used. The currently supported size is + 4096, other sizes may be added in the future. 
If + an unsupported value is requested, the error + message will list the supported values for the + caller's project. + + This field is a member of `oneof`_ ``_physical_block_size_bytes``. + provisioned_iops (int): + Indicates how many IOPS to provision for the + disk. This sets the number of I/O operations per + second that the disk can handle. Values must be + between 10,000 and 120,000. For more details, + see the Extreme persistent disk documentation. + + This field is a member of `oneof`_ ``_provisioned_iops``. + provisioned_throughput (int): + Indicates how much throughput to provision + for the disk. This sets the number of throughput + mb per second that the disk can handle. Values + must be between 1 and 7,124. + + This field is a member of `oneof`_ ``_provisioned_throughput``. + region (str): + [Output Only] URL of the region where the disk resides. Only + applicable for regional resources. You must specify this + field as part of the HTTP request URL. It is not settable as + a field in the request body. + + This field is a member of `oneof`_ ``_region``. + replica_zones (MutableSequence[str]): + URLs of the zones where the disk should be + replicated to. Only applicable for regional + resources. + resource_policies (MutableSequence[str]): + Resource policies applied to this disk for + automatic snapshot creations. + resource_status (google.cloud.compute_v1.types.DiskResourceStatus): + [Output Only] Status information for the disk resource. + + This field is a member of `oneof`_ ``_resource_status``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + self_link (str): + [Output Only] Server-defined fully-qualified URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. + size_gb (int): + Size, in GB, of the persistent disk. 
You can + specify this field when creating a persistent + disk using the sourceImage, sourceSnapshot, or + sourceDisk parameter, or specify it alone to + create an empty persistent disk. If you specify + this field along with a source, the value of + sizeGb must not be less than the size of the + source. Acceptable values are 1 to 65536, + inclusive. + + This field is a member of `oneof`_ ``_size_gb``. + source_consistency_group_policy (str): + [Output Only] URL of the DiskConsistencyGroupPolicy for a + secondary disk that was created using a consistency group. + + This field is a member of `oneof`_ ``_source_consistency_group_policy``. + source_consistency_group_policy_id (str): + [Output Only] ID of the DiskConsistencyGroupPolicy for a + secondary disk that was created using a consistency group. + + This field is a member of `oneof`_ ``_source_consistency_group_policy_id``. + source_disk (str): + The source disk used to create this disk. You + can provide this as a partial or full URL to the + resource. For example, the following are valid + values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /disks/disk - + https://www.googleapis.com/compute/v1/projects/project/regions/region + /disks/disk - + projects/project/zones/zone/disks/disk - + projects/project/regions/region/disks/disk - + zones/zone/disks/disk - + regions/region/disks/disk + + This field is a member of `oneof`_ ``_source_disk``. + source_disk_id (str): + [Output Only] The unique ID of the disk used to create this + disk. This value identifies the exact disk that was used to + create this persistent disk. For example, if you created the + persistent disk from a disk that was later deleted and + recreated under the same name, the source disk ID would + identify the exact version of the disk that was used. + + This field is a member of `oneof`_ ``_source_disk_id``. + source_image (str): + The source image used to create this disk. 
If + the source image is deleted, this field will not + be set. To create a disk with one of the public + operating system images, specify the image by + its family name. For example, specify + family/debian-9 to use the latest Debian 9 + image: + projects/debian-cloud/global/images/family/debian-9 + Alternatively, use a specific version of a + public operating system image: + projects/debian-cloud/global/images/debian-9-stretch-vYYYYMMDD + To create a disk with a custom image that you + created, specify the image name in the following + format: global/images/my-custom-image You can + also specify a custom image by its image family, + which returns the latest version of the image in + that family. Replace the image name with + family/family-name: + global/images/family/my-image-family + + This field is a member of `oneof`_ ``_source_image``. + source_image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source image. Required if the source image is + protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_image_encryption_key``. + source_image_id (str): + [Output Only] The ID value of the image used to create this + disk. This value identifies the exact image that was used to + create this persistent disk. For example, if you created the + persistent disk from an image that was later deleted and + recreated under the same name, the source image ID would + identify the exact version of the image that was used. + + This field is a member of `oneof`_ ``_source_image_id``. + source_snapshot (str): + The source snapshot used to create this disk. + You can provide this as a partial or full URL to + the resource. 
For example, the following are + valid values: - + https://www.googleapis.com/compute/v1/projects/project + /global/snapshots/snapshot - + projects/project/global/snapshots/snapshot - + global/snapshots/snapshot + + This field is a member of `oneof`_ ``_source_snapshot``. + source_snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source snapshot. Required if the source snapshot + is protected by a customer-supplied encryption + key. + + This field is a member of `oneof`_ ``_source_snapshot_encryption_key``. + source_snapshot_id (str): + [Output Only] The unique ID of the snapshot used to create + this disk. This value identifies the exact snapshot that was + used to create this persistent disk. For example, if you + created the persistent disk from a snapshot that was later + deleted and recreated under the same name, the source + snapshot ID would identify the exact version of the snapshot + that was used. + + This field is a member of `oneof`_ ``_source_snapshot_id``. + source_storage_object (str): + The full Google Cloud Storage URI where the + disk image is stored. This file must be a + gzip-compressed tarball whose name ends in + .tar.gz or virtual machine disk whose name ends + in vmdk. Valid URIs may start with gs:// or + https://storage.googleapis.com/. This flag is + not optimized for creating multiple disks from a + source storage object. To create many disks from + a source storage object, use gcloud compute + images import instead. + + This field is a member of `oneof`_ ``_source_storage_object``. + status (str): + [Output Only] The status of disk creation. - CREATING: Disk + is provisioning. - RESTORING: Source data is being copied + into the disk. - FAILED: Disk creation failed. - READY: Disk + is ready for use. - DELETING: Disk is deleting. Check the + Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. 
+ type_ (str): + URL of the disk type resource describing + which disk type to use to create the disk. + Provide this when creating the disk. For + example: projects/project + /zones/zone/diskTypes/pd-ssd . See Persistent + disk types. + + This field is a member of `oneof`_ ``_type``. + users (MutableSequence[str]): + [Output Only] Links to the users of the disk (attached + instances) in form: + projects/project/zones/zone/instances/instance + zone (str): + [Output Only] URL of the zone where the disk resides. You + must specify this field as part of the HTTP request URL. It + is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. + """ + class Architecture(proto.Enum): + r"""The architecture of the disk. Valid values are ARM64 or X86_64. + + Values: + UNDEFINED_ARCHITECTURE (0): + A value indicating that the enum field is not + set. + ARCHITECTURE_UNSPECIFIED (394750507): + Default value indicating Architecture is not + set. + ARM64 (62547450): + Machines with architecture ARM64 + X86_64 (425300551): + Machines with architecture X86_64 + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + + class Status(proto.Enum): + r"""[Output Only] The status of disk creation. - CREATING: Disk is + provisioning. - RESTORING: Source data is being copied into the + disk. - FAILED: Disk creation failed. - READY: Disk is ready for + use. - DELETING: Disk is deleting. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + Disk is provisioning + DELETING (528602024): + Disk is deleting. + FAILED (455706685): + Disk creation failed. + READY (77848963): + Disk is ready for use. + RESTORING (404263851): + Source data is being copied into the disk. 
+ """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + FAILED = 455706685 + READY = 77848963 + RESTORING = 404263851 + + architecture: str = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) + async_primary_disk: 'DiskAsyncReplication' = proto.Field( + proto.MESSAGE, + number=180517533, + optional=True, + message='DiskAsyncReplication', + ) + async_secondary_disks: MutableMapping[str, 'DiskAsyncReplicationList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=322925608, + message='DiskAsyncReplicationList', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disk_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=271660677, + optional=True, + message='CustomerEncryptionKey', + ) + guest_os_features: MutableSequence['GuestOsFeature'] = proto.RepeatedField( + proto.MESSAGE, + number=79294545, + message='GuestOsFeature', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + last_attach_timestamp: str = proto.Field( + proto.STRING, + number=42159653, + optional=True, + ) + last_detach_timestamp: str = proto.Field( + proto.STRING, + number=56471027, + optional=True, + ) + license_codes: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=45482664, + ) + licenses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=337642578, + ) + location_hint: str = proto.Field( + proto.STRING, + number=350519505, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + 
options: str = proto.Field( + proto.STRING, + number=361137822, + optional=True, + ) + params: 'DiskParams' = proto.Field( + proto.MESSAGE, + number=78313862, + optional=True, + message='DiskParams', + ) + physical_block_size_bytes: int = proto.Field( + proto.INT64, + number=420007943, + optional=True, + ) + provisioned_iops: int = proto.Field( + proto.INT64, + number=186769108, + optional=True, + ) + provisioned_throughput: int = proto.Field( + proto.INT64, + number=526524181, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=48438272, + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + resource_status: 'DiskResourceStatus' = proto.Field( + proto.MESSAGE, + number=249429315, + optional=True, + message='DiskResourceStatus', + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + size_gb: int = proto.Field( + proto.INT64, + number=494929369, + optional=True, + ) + source_consistency_group_policy: str = proto.Field( + proto.STRING, + number=19616093, + optional=True, + ) + source_consistency_group_policy_id: str = proto.Field( + proto.STRING, + number=267568957, + optional=True, + ) + source_disk: str = proto.Field( + proto.STRING, + number=451753793, + optional=True, + ) + source_disk_id: str = proto.Field( + proto.STRING, + number=454190809, + optional=True, + ) + source_image: str = proto.Field( + proto.STRING, + number=50443319, + optional=True, + ) + source_image_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=381503659, + optional=True, + message='CustomerEncryptionKey', + ) + source_image_id: str = proto.Field( + proto.STRING, + number=55328291, + optional=True, + ) + source_snapshot: str = 
proto.Field( + proto.STRING, + number=126061928, + optional=True, + ) + source_snapshot_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=303679322, + optional=True, + message='CustomerEncryptionKey', + ) + source_snapshot_id: str = proto.Field( + proto.STRING, + number=98962258, + optional=True, + ) + source_storage_object: str = proto.Field( + proto.STRING, + number=233052711, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + users: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=111578632, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class DiskAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.DisksScopedList]): + A list of DisksScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#diskAggregatedList for aggregated lists of + persistent disks. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'DisksScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='DisksScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class DiskAsyncReplication(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consistency_group_policy (str): + [Output Only] URL of the DiskConsistencyGroupPolicy if + replication was started on the disk as a member of a group. + + This field is a member of `oneof`_ ``_consistency_group_policy``. + consistency_group_policy_id (str): + [Output Only] ID of the DiskConsistencyGroupPolicy if + replication was started on the disk as a member of a group. + + This field is a member of `oneof`_ ``_consistency_group_policy_id``. + disk (str): + The other disk asynchronously replicated to + or from the current disk. You can provide this + as a partial or full URL to the resource. 
For + example, the following are valid values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /disks/disk - + projects/project/zones/zone/disks/disk - + zones/zone/disks/disk + + This field is a member of `oneof`_ ``_disk``. + disk_id (str): + [Output Only] The unique ID of the other disk asynchronously + replicated to or from the current disk. This value + identifies the exact disk that was used to create this + replication. For example, if you started replicating the + persistent disk from a disk that was later deleted and + recreated under the same name, the disk ID would identify + the exact version of the disk that was used. + + This field is a member of `oneof`_ ``_disk_id``. + """ + + consistency_group_policy: str = proto.Field( + proto.STRING, + number=1991097, + optional=True, + ) + consistency_group_policy_id: str = proto.Field( + proto.STRING, + number=261065057, + optional=True, + ) + disk: str = proto.Field( + proto.STRING, + number=3083677, + optional=True, + ) + disk_id: str = proto.Field( + proto.STRING, + number=60990205, + optional=True, + ) + + +class DiskAsyncReplicationList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + async_replication_disk (google.cloud.compute_v1.types.DiskAsyncReplication): + + This field is a member of `oneof`_ ``_async_replication_disk``. + """ + + async_replication_disk: 'DiskAsyncReplication' = proto.Field( + proto.MESSAGE, + number=231794067, + optional=True, + message='DiskAsyncReplication', + ) + + +class DiskInstantiationConfig(proto.Message): + r"""A specification of the desired way to instantiate a disk in + the instance template when its created from a source instance. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_delete (bool): + Specifies whether the disk will be + auto-deleted when the instance is deleted (but + not when the disk is detached from the + instance). + + This field is a member of `oneof`_ ``_auto_delete``. + custom_image (str): + The custom source image to be used to restore + this disk when instantiating this instance + template. + + This field is a member of `oneof`_ ``_custom_image``. + device_name (str): + Specifies the device name of the disk to + which the configurations apply to. + + This field is a member of `oneof`_ ``_device_name``. + instantiate_from (str): + Specifies whether to include the disk and + what image to use. Possible values are: - + source-image: to use the same image that was + used to create the source instance's + corresponding disk. Applicable to the boot disk + and additional read-write disks. - + source-image-family: to use the same image + family that was used to create the source + instance's corresponding disk. Applicable to the + boot disk and additional read-write disks. - + custom-image: to use a user-provided image url + for disk creation. Applicable to the boot disk + and additional read-write disks. - + attach-read-only: to attach a read-only disk. + Applicable to read-only disks. - do-not-include: + to exclude a disk from the template. Applicable + to additional read-write disks, local SSDs, and + read-only disks. Check the InstantiateFrom enum + for the list of possible values. + + This field is a member of `oneof`_ ``_instantiate_from``. + """ + class InstantiateFrom(proto.Enum): + r"""Specifies whether to include the disk and what image to use. + Possible values are: - source-image: to use the same image that + was used to create the source instance's corresponding disk. + Applicable to the boot disk and additional read-write disks. 
- + source-image-family: to use the same image family that was used + to create the source instance's corresponding disk. Applicable + to the boot disk and additional read-write disks. - + custom-image: to use a user-provided image url for disk + creation. Applicable to the boot disk and additional read-write + disks. - attach-read-only: to attach a read-only disk. + Applicable to read-only disks. - do-not-include: to exclude a + disk from the template. Applicable to additional read-write + disks, local SSDs, and read-only disks. + + Values: + UNDEFINED_INSTANTIATE_FROM (0): + A value indicating that the enum field is not + set. + ATTACH_READ_ONLY (513775419): + Attach the existing disk in read-only mode. + The request will fail if the disk was attached + in read-write mode on the source instance. + Applicable to: read-only disks. + BLANK (63281460): + Create a blank disk. The disk will be created + unformatted. Applicable to: additional + read-write disks, local SSDs. + CUSTOM_IMAGE (196311789): + Use the custom image specified in the custom_image field. + Applicable to: boot disk, additional read-write disks. + DEFAULT (115302945): + Use the default instantiation option for the + corresponding type of disk. For boot disk and + any other R/W disks, new custom images will be + created from each disk. For read-only disks, + they will be attached in read-only mode. Local + SSD disks will be created as blank volumes. + DO_NOT_INCLUDE (104218952): + Do not include the disk in the instance + template. Applicable to: additional read-write + disks, local SSDs, read-only disks. + SOURCE_IMAGE (62631959): + Use the same source image used for creation + of the source instance's corresponding disk. The + request will fail if the source VM's disk was + created from a snapshot. Applicable to: boot + disk, additional read-write disks. + SOURCE_IMAGE_FAMILY (76850316): + Use the same source image family used for + creation of the source instance's corresponding + disk. 
The request will fail if the source image + of the source disk does not belong to any image + family. Applicable to: boot disk, additional + read-write disks. + """ + UNDEFINED_INSTANTIATE_FROM = 0 + ATTACH_READ_ONLY = 513775419 + BLANK = 63281460 + CUSTOM_IMAGE = 196311789 + DEFAULT = 115302945 + DO_NOT_INCLUDE = 104218952 + SOURCE_IMAGE = 62631959 + SOURCE_IMAGE_FAMILY = 76850316 + + auto_delete: bool = proto.Field( + proto.BOOL, + number=464761403, + optional=True, + ) + custom_image: str = proto.Field( + proto.STRING, + number=184123149, + optional=True, + ) + device_name: str = proto.Field( + proto.STRING, + number=67541716, + optional=True, + ) + instantiate_from: str = proto.Field( + proto.STRING, + number=393383903, + optional=True, + ) + + +class DiskList(proto.Message): + r"""A list of Disk resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Disk]): + A list of Disk resources. + kind (str): + [Output Only] Type of resource. Always compute#diskList for + lists of disks. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. 
+ + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Disk'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Disk', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class DiskMoveRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + destination_zone (str): + The URL of the destination zone to move the + disk. This can be a full or partial URL. For + example, the following are all valid URLs to a + zone: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + - projects/project/zones/zone - zones/zone + + This field is a member of `oneof`_ ``_destination_zone``. + target_disk (str): + The URL of the target disk to move. This can + be a full or partial URL. For example, the + following are all valid URLs to a disk: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /disks/disk - + projects/project/zones/zone/disks/disk - + zones/zone/disks/disk + + This field is a member of `oneof`_ ``_target_disk``. + """ + + destination_zone: str = proto.Field( + proto.STRING, + number=131854653, + optional=True, + ) + target_disk: str = proto.Field( + proto.STRING, + number=62433163, + optional=True, + ) + + +class DiskParams(proto.Message): + r"""Additional disk params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Resource manager tags to be bound to the disk. 
Tag keys and + values have the same definition as resource manager tags. + Keys must be in the format ``tagKeys/{tag_key_id}``, and + values are in the format ``tagValues/456``. The field is + ignored (both PUT & PATCH) when empty. + """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=377671164, + ) + + +class DiskResourceStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + async_primary_disk (google.cloud.compute_v1.types.DiskResourceStatusAsyncReplicationStatus): + + This field is a member of `oneof`_ ``_async_primary_disk``. + async_secondary_disks (MutableMapping[str, google.cloud.compute_v1.types.DiskResourceStatusAsyncReplicationStatus]): + Key: disk, value: AsyncReplicationStatus + message + """ + + async_primary_disk: 'DiskResourceStatusAsyncReplicationStatus' = proto.Field( + proto.MESSAGE, + number=180517533, + optional=True, + message='DiskResourceStatusAsyncReplicationStatus', + ) + async_secondary_disks: MutableMapping[str, 'DiskResourceStatusAsyncReplicationStatus'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=322925608, + message='DiskResourceStatusAsyncReplicationStatus', + ) + + +class DiskResourceStatusAsyncReplicationStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + state (str): + Check the State enum for the list of possible + values. + + This field is a member of `oneof`_ ``_state``. + """ + class State(proto.Enum): + r""" + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + Replication is active. + CREATED (135924424): + Secondary disk is created and is waiting for + replication to start. + STARTING (488820800): + Replication is starting. 
+ STATE_UNSPECIFIED (470755401): + No description available. + STOPPED (444276141): + Replication is stopped. + STOPPING (350791796): + Replication is stopping. + """ + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + CREATED = 135924424 + STARTING = 488820800 + STATE_UNSPECIFIED = 470755401 + STOPPED = 444276141 + STOPPING = 350791796 + + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class DiskType(proto.Message): + r"""Represents a Disk Type resource. Google Compute Engine has two Disk + Type resources: \* + `Regional `__ \* + `Zonal `__ You can choose + from a variety of disk types based on your needs. For more + information, read Storage options. The diskTypes resource represents + disk types for a zonal persistent disk. For more information, read + Zonal persistent disks. The regionDiskTypes resource represents disk + types for a regional persistent disk. For more information, read + Regional persistent disks. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + default_disk_size_gb (int): + [Output Only] Server-defined default disk size in GB. + + This field is a member of `oneof`_ ``_default_disk_size_gb``. + deprecated (google.cloud.compute_v1.types.DeprecationStatus): + [Output Only] The deprecation status associated with this + disk type. + + This field is a member of `oneof`_ ``_deprecated``. + description (str): + [Output Only] An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always compute#diskType + for disk types. 
+ + This field is a member of `oneof`_ ``_kind``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + region (str): + [Output Only] URL of the region where the disk type resides. + Only applicable for regional resources. You must specify + this field as part of the HTTP request URL. It is not + settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + valid_disk_size (str): + [Output Only] An optional textual description of the valid + disk size, such as "10GB-10TB". + + This field is a member of `oneof`_ ``_valid_disk_size``. + zone (str): + [Output Only] URL of the zone where the disk type resides. + You must specify this field as part of the HTTP request URL. + It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. + """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + default_disk_size_gb: int = proto.Field( + proto.INT64, + number=270619253, + optional=True, + ) + deprecated: 'DeprecationStatus' = proto.Field( + proto.MESSAGE, + number=515138995, + optional=True, + message='DeprecationStatus', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + valid_disk_size: str = proto.Field( + proto.STRING, + number=493962464, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + 
optional=True, + ) + + +class DiskTypeAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.DiskTypesScopedList]): + A list of DiskTypesScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#diskTypeAggregatedList. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'DiskTypesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='DiskTypesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class DiskTypeList(proto.Message): + r"""Contains a list of disk types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.DiskType]): + A list of DiskType resources. + kind (str): + [Output Only] Type of resource. Always compute#diskTypeList + for disk types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['DiskType'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='DiskType', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class DiskTypesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_types (MutableSequence[google.cloud.compute_v1.types.DiskType]): + [Output Only] A list of disk types contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of disk types when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + disk_types: MutableSequence['DiskType'] = proto.RepeatedField( + proto.MESSAGE, + number=198926167, + message='DiskType', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class DisksAddResourcePoliciesRequest(proto.Message): + r""" + + Attributes: + resource_policies (MutableSequence[str]): + Full or relative path to the resource policy + to be added to this disk. You can only specify + one resource policy. 
+ """ + + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + + +class DisksRemoveResourcePoliciesRequest(proto.Message): + r""" + + Attributes: + resource_policies (MutableSequence[str]): + Resource policies to be removed from this + disk. + """ + + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + + +class DisksResizeRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + size_gb (int): + The new size of the persistent disk, which is + specified in GB. + + This field is a member of `oneof`_ ``_size_gb``. + """ + + size_gb: int = proto.Field( + proto.INT64, + number=494929369, + optional=True, + ) + + +class DisksScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disks (MutableSequence[google.cloud.compute_v1.types.Disk]): + [Output Only] A list of disks contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of disks when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + disks: MutableSequence['Disk'] = proto.RepeatedField( + proto.MESSAGE, + number=95594102, + message='Disk', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class DisksStartAsyncReplicationRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + async_secondary_disk (str): + The secondary disk to start asynchronous + replication to. You can provide this as a + partial or full URL to the resource. 
For + example, the following are valid values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /disks/disk - + https://www.googleapis.com/compute/v1/projects/project/regions/region + /disks/disk - + projects/project/zones/zone/disks/disk - + projects/project/regions/region/disks/disk - + zones/zone/disks/disk - + regions/region/disks/disk + + This field is a member of `oneof`_ ``_async_secondary_disk``. + """ + + async_secondary_disk: str = proto.Field( + proto.STRING, + number=131645867, + optional=True, + ) + + +class DisksStopGroupAsyncReplicationResource(proto.Message): + r"""A transient resource used in + compute.disks.stopGroupAsyncReplication and + compute.regionDisks.stopGroupAsyncReplication. It is only used + to process requests and is not persisted. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + resource_policy (str): + The URL of the DiskConsistencyGroupPolicy for + the group of disks to stop. This may be a full + or partial URL, such as: - + https://www.googleapis.com/compute/v1/projects/project/regions/region + /resourcePolicies/resourcePolicy - + projects/project/regions/region/resourcePolicies/resourcePolicy + - regions/region/resourcePolicies/resourcePolicy + + This field is a member of `oneof`_ ``_resource_policy``. + """ + + resource_policy: str = proto.Field( + proto.STRING, + number=159240835, + optional=True, + ) + + +class DisplayDevice(proto.Message): + r"""A set of Display Device options + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_display (bool): + Defines whether the instance has Display + enabled. + + This field is a member of `oneof`_ ``_enable_display``. + """ + + enable_display: bool = proto.Field( + proto.BOOL, + number=14266886, + optional=True, + ) + + +class DistributionPolicy(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_shape (str): + The distribution shape to which the group + converges either proactively or on resize events + (depending on the value set in + updatePolicy.instanceRedistributionType). Check + the TargetShape enum for the list of possible + values. + + This field is a member of `oneof`_ ``_target_shape``. + zones (MutableSequence[google.cloud.compute_v1.types.DistributionPolicyZoneConfiguration]): + Zones where the regional managed instance + group will create and manage its instances. + """ + class TargetShape(proto.Enum): + r"""The distribution shape to which the group converges either + proactively or on resize events (depending on the value set in + updatePolicy.instanceRedistributionType). + + Values: + UNDEFINED_TARGET_SHAPE (0): + A value indicating that the enum field is not + set. + ANY (64972): + The group picks zones for creating VM + instances to fulfill the requested number of VMs + within present resource constraints and to + maximize utilization of unused zonal + reservations. Recommended for batch workloads + that do not require high availability. + ANY_SINGLE_ZONE (61100880): + The group creates all VM instances within a + single zone. The zone is selected based on the + present resource constraints and to maximize + utilization of unused zonal reservations. + Recommended for batch workloads with heavy + interprocess communication. + BALANCED (468409608): + The group prioritizes acquisition of + resources, scheduling VMs in zones where + resources are available while distributing VMs + as evenly as possible across selected zones to + minimize the impact of zonal failure. + Recommended for highly available serving + workloads. + EVEN (2140442): + The group schedules VM instance creation and + deletion to achieve and maintain an even number + of managed instances across the selected zones. 
+ The distribution is even when the number of + managed instances does not differ by more than 1 + between any two zones. Recommended for highly + available serving workloads. + """ + UNDEFINED_TARGET_SHAPE = 0 + ANY = 64972 + ANY_SINGLE_ZONE = 61100880 + BALANCED = 468409608 + EVEN = 2140442 + + target_shape: str = proto.Field( + proto.STRING, + number=338621299, + optional=True, + ) + zones: MutableSequence['DistributionPolicyZoneConfiguration'] = proto.RepeatedField( + proto.MESSAGE, + number=116085319, + message='DistributionPolicyZoneConfiguration', + ) + + +class DistributionPolicyZoneConfiguration(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + zone (str): + The URL of the zone. The zone must exist in + the region where the managed instance group is + located. + + This field is a member of `oneof`_ ``_zone``. + """ + + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class Duration(proto.Message): + r"""A Duration represents a fixed-length span of time represented + as a count of seconds and fractions of seconds at nanosecond + resolution. It is independent of any calendar and concepts like + "day" or "month". Range is approximately 10,000 years. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + nanos (int): + Span of time that's a fraction of a second at nanosecond + resolution. Durations less than one second are represented + with a 0 ``seconds`` field and a positive ``nanos`` field. + Must be from 0 to 999,999,999 inclusive. + + This field is a member of `oneof`_ ``_nanos``. + seconds (int): + Span of time at a resolution of a second. Must be from 0 to + 315,576,000,000 inclusive. 
Note: these bounds are computed + from: 60 sec/min \* 60 min/hr \* 24 hr/day \* 365.25 + days/year \* 10000 years + + This field is a member of `oneof`_ ``_seconds``. + """ + + nanos: int = proto.Field( + proto.INT32, + number=104586303, + optional=True, + ) + seconds: int = proto.Field( + proto.INT64, + number=359484031, + optional=True, + ) + + +class EnableXpnHostProjectRequest(proto.Message): + r"""A request message for Projects.EnableXpnHost. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class EnableXpnResourceProjectRequest(proto.Message): + r"""A request message for Projects.EnableXpnResource. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. 
+ projects_enable_xpn_resource_request_resource (google.cloud.compute_v1.types.ProjectsEnableXpnResourceRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + projects_enable_xpn_resource_request_resource: 'ProjectsEnableXpnResourceRequest' = proto.Field( + proto.MESSAGE, + number=421980207, + message='ProjectsEnableXpnResourceRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class Error(proto.Message): + r"""[Output Only] If errors are generated during processing of the + operation, this field will be populated. + + Attributes: + errors (MutableSequence[google.cloud.compute_v1.types.Errors]): + [Output Only] The array of errors encountered while + processing this operation. + """ + + errors: MutableSequence['Errors'] = proto.RepeatedField( + proto.MESSAGE, + number=315977579, + message='Errors', + ) + + +class ErrorDetails(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + error_info (google.cloud.compute_v1.types.ErrorInfo): + + This field is a member of `oneof`_ ``_error_info``. 
+ help_ (google.cloud.compute_v1.types.Help): + + This field is a member of `oneof`_ ``_help``. + localized_message (google.cloud.compute_v1.types.LocalizedMessage): + + This field is a member of `oneof`_ ``_localized_message``. + quota_info (google.cloud.compute_v1.types.QuotaExceededInfo): + + This field is a member of `oneof`_ ``_quota_info``. + """ + + error_info: 'ErrorInfo' = proto.Field( + proto.MESSAGE, + number=25251973, + optional=True, + message='ErrorInfo', + ) + help_: 'Help' = proto.Field( + proto.MESSAGE, + number=3198785, + optional=True, + message='Help', + ) + localized_message: 'LocalizedMessage' = proto.Field( + proto.MESSAGE, + number=404537155, + optional=True, + message='LocalizedMessage', + ) + quota_info: 'QuotaExceededInfo' = proto.Field( + proto.MESSAGE, + number=93923861, + optional=True, + message='QuotaExceededInfo', + ) + + +class ErrorInfo(proto.Message): + r"""Describes the cause of the error with structured details. Example of + an error when contacting the "pubsub.googleapis.com" API when it is + not enabled: { "reason": "API_DISABLED" "domain": "googleapis.com" + "metadata": { "resource": "projects/123", "service": + "pubsub.googleapis.com" } } This response indicates that the + pubsub.googleapis.com API is not enabled. Example of an error that + is returned when attempting to create a Spanner instance in a region + that is out of stock: { "reason": "STOCKOUT" "domain": + "spanner.googleapis.com", "metadata": { "availableRegions": + "us-central1,us-east2" } } + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + domain (str): + The logical grouping to which the "reason" + belongs. The error domain is typically the + registered service name of the tool or product + that generates the error. Example: + "pubsub.googleapis.com". 
If the error is + generated by some common infrastructure, the + error domain must be a globally unique value + that identifies the infrastructure. For Google + API infrastructure, the error domain is + "googleapis.com". + + This field is a member of `oneof`_ ``_domain``. + metadatas (MutableMapping[str, str]): + Additional structured details about this error. Keys should + match /[a-zA-Z0-9-_]/ and be limited to 64 characters in + length. When identifying the current value of an exceeded + limit, the units should be contained in the key, not the + value. For example, rather than {"instanceLimit": + "100/request"}, should be returned as, + {"instanceLimitPerRequest": "100"}, if the client exceeds + the number of instances that can be created in a single + (batch) request. + reason (str): + The reason of the error. This is a constant value that + identifies the proximate cause of the error. Error reasons + are unique within a particular domain of errors. This should + be at most 63 characters and match a regular expression of + ``A-Z+[A-Z0-9]``, which represents UPPER_SNAKE_CASE. + + This field is a member of `oneof`_ ``_reason``. + """ + + domain: str = proto.Field( + proto.STRING, + number=284415172, + optional=True, + ) + metadatas: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8514340, + ) + reason: str = proto.Field( + proto.STRING, + number=138777156, + optional=True, + ) + + +class Errors(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (str): + [Output Only] The error type identifier for this error. + + This field is a member of `oneof`_ ``_code``. + error_details (MutableSequence[google.cloud.compute_v1.types.ErrorDetails]): + [Output Only] An optional list of messages that contain the + error details. There is a set of defined message types to + use for providing details.The syntax depends on the error + code. 
For example, QuotaExceededInfo will have details when + the error code is QUOTA_EXCEEDED. + location (str): + [Output Only] Indicates the field in the request that caused + the error. This property is optional. + + This field is a member of `oneof`_ ``_location``. + message (str): + [Output Only] An optional, human-readable error message. + + This field is a member of `oneof`_ ``_message``. + """ + + code: str = proto.Field( + proto.STRING, + number=3059181, + optional=True, + ) + error_details: MutableSequence['ErrorDetails'] = proto.RepeatedField( + proto.MESSAGE, + number=274653963, + message='ErrorDetails', + ) + location: str = proto.Field( + proto.STRING, + number=290430901, + optional=True, + ) + message: str = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + + +class ExchangedPeeringRoute(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dest_range (str): + The destination range of the route. + + This field is a member of `oneof`_ ``_dest_range``. + imported (bool): + True if the peering route has been imported + from a peer. The actual import happens if the + field networkPeering.importCustomRoutes is true + for this network, and + networkPeering.exportCustomRoutes is true for + the peer network, and the import does not result + in a route conflict. + + This field is a member of `oneof`_ ``_imported``. + next_hop_region (str): + The region of peering route next hop, only + applies to dynamic routes. + + This field is a member of `oneof`_ ``_next_hop_region``. + priority (int): + The priority of the peering route. + + This field is a member of `oneof`_ ``_priority``. + type_ (str): + The type of the peering route. + Check the Type enum for the list of possible + values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""The type of the peering route. 
+ + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + DYNAMIC_PEERING_ROUTE (469794858): + For routes exported from local network. + STATIC_PEERING_ROUTE (473407545): + The peering route. + SUBNET_PEERING_ROUTE (465782504): + The peering route corresponding to subnetwork + range. + """ + UNDEFINED_TYPE = 0 + DYNAMIC_PEERING_ROUTE = 469794858 + STATIC_PEERING_ROUTE = 473407545 + SUBNET_PEERING_ROUTE = 465782504 + + dest_range: str = proto.Field( + proto.STRING, + number=381327712, + optional=True, + ) + imported: bool = proto.Field( + proto.BOOL, + number=114502404, + optional=True, + ) + next_hop_region: str = proto.Field( + proto.STRING, + number=122577014, + optional=True, + ) + priority: int = proto.Field( + proto.UINT32, + number=445151652, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class ExchangedPeeringRoutesList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.ExchangedPeeringRoute]): + A list of ExchangedPeeringRoute resources. + kind (str): + [Output Only] Type of resource. Always + compute#exchangedPeeringRoutesList for exchanged peering + routes lists. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['ExchangedPeeringRoute'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='ExchangedPeeringRoute', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ExpandIpCidrRangeSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.ExpandIpCidrRange. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + subnetwork (str): + Name of the Subnetwork resource to update. + subnetworks_expand_ip_cidr_range_request_resource (google.cloud.compute_v1.types.SubnetworksExpandIpCidrRangeRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + ) + subnetworks_expand_ip_cidr_range_request_resource: 'SubnetworksExpandIpCidrRangeRequest' = proto.Field( + proto.MESSAGE, + number=477014110, + message='SubnetworksExpandIpCidrRangeRequest', + ) + + +class Expr(proto.Message): + r"""Represents a textual expression in the Common Expression Language + (CEL) syntax. CEL is a C-like expression language. The syntax and + semantics of CEL are documented at + https://github.com/google/cel-spec. Example (Comparison): title: + "Summary size limit" description: "Determines if a summary is less + than 100 chars" expression: "document.summary.size() < 100" Example + (Equality): title: "Requestor is owner" description: "Determines if + requestor is the document owner" expression: "document.owner == + request.auth.claims.email" Example (Logic): title: "Public + documents" description: "Determine whether the document should be + publicly visible" expression: "document.type != 'private' && + document.type != 'internal'" Example (Data Manipulation): title: + "Notification string" description: "Create a notification string + with a timestamp." 
expression: "'New message received at ' + + string(document.create_time)" The exact variables and functions that + may be referenced within an expression are determined by the service + that evaluates it. See the service documentation for additional + information. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + Optional. Description of the expression. This + is a longer text which describes the expression, + e.g. when hovered over it in a UI. + + This field is a member of `oneof`_ ``_description``. + expression (str): + Textual representation of an expression in + Common Expression Language syntax. + + This field is a member of `oneof`_ ``_expression``. + location (str): + Optional. String indicating the location of + the expression for error reporting, e.g. a file + name and a position in the file. + + This field is a member of `oneof`_ ``_location``. + title (str): + Optional. Title for the expression, i.e. a + short string describing its purpose. This can be + used e.g. in UIs which allow to enter the + expression. + + This field is a member of `oneof`_ ``_title``. + """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + expression: str = proto.Field( + proto.STRING, + number=352031384, + optional=True, + ) + location: str = proto.Field( + proto.STRING, + number=290430901, + optional=True, + ) + title: str = proto.Field( + proto.STRING, + number=110371416, + optional=True, + ) + + +class ExternalVpnGateway(proto.Message): + r"""Represents an external VPN gateway. External VPN gateway is + the on-premises VPN gateway(s) or another cloud provider's VPN + gateway that connects to your Google Cloud VPN gateway. 
To + create a highly available VPN from Google Cloud Platform to your + VPN gateway or another cloud provider's VPN gateway, you must + create a external VPN gateway resource with information about + the other gateway. For more information about using external VPN + gateways, see Creating an HA VPN gateway and tunnel pair to a + peer VPN. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + interfaces (MutableSequence[google.cloud.compute_v1.types.ExternalVpnGatewayInterface]): + A list of interfaces for this external VPN + gateway. If your peer-side gateway is an + on-premises gateway and non-AWS cloud providers' + gateway, at most two interfaces can be provided + for an external VPN gateway. If your peer side + is an AWS virtual private gateway, four + interfaces should be provided for an external + VPN gateway. + kind (str): + [Output Only] Type of the resource. Always + compute#externalVpnGateway for externalVpnGateways. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this ExternalVpnGateway, which is essentially a + hash of the labels set used for optimistic + locking. The fingerprint is initially generated + by Compute Engine and changes after every + request to modify or update labels. 
You must + always provide an up-to-date fingerprint hash in + order to update or change labels, otherwise the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve an + ExternalVpnGateway. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + redundancy_type (str): + Indicates the user-supplied redundancy type + of this external VPN gateway. Check the + RedundancyType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_redundancy_type``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + """ + class RedundancyType(proto.Enum): + r"""Indicates the user-supplied redundancy type of this external + VPN gateway. + + Values: + UNDEFINED_REDUNDANCY_TYPE (0): + A value indicating that the enum field is not + set. 
+            FOUR_IPS_REDUNDANCY (520087913):
+                The external VPN gateway has four public IP addresses; at
+                the time of writing this API, the AWS virtual private
+                gateway is an example which has four public IP addresses for
+                high availability connections; there should be two VPN
+                connections in the AWS virtual private gateway, each AWS
+                VPN connection has two public IP addresses; please make sure
+                to put two public IP addresses from one AWS VPN connection
+                into interfaces 0 and 1 of this external VPN gateway, and
+                put the other two public IP addresses from another AWS VPN
+                connection into interfaces 2 and 3 of this external VPN
+                gateway. When displaying highly available configuration
+                status for the VPN tunnels connected to FOUR_IPS_REDUNDANCY
+                external VPN gateway, Google will always detect whether
+                interfaces 0 and 1 are connected on one interface of HA
+                Cloud VPN gateway, and detect whether interfaces 2 and 3 are
+                connected to another interface of the HA Cloud VPN gateway.
+            SINGLE_IP_INTERNALLY_REDUNDANT (133914873):
+                The external VPN gateway has only one public
+                IP address which internally provides redundancy
+                or failover.
+            TWO_IPS_REDUNDANCY (367049635):
+                The external VPN gateway has two public IP
+                addresses which are redundant with each other,
+                the following two types of setup on your
+                on-premises side would have this type of
+                redundancy: (1) Two separate on-premises
+                gateways, each with one public IP address, the
+                two on-premises gateways are redundant with each
+                other. (2) A single on-premises gateway with two
+                public IP addresses that are redundant with
+                each other.
+ """ + UNDEFINED_REDUNDANCY_TYPE = 0 + FOUR_IPS_REDUNDANCY = 520087913 + SINGLE_IP_INTERNALLY_REDUNDANT = 133914873 + TWO_IPS_REDUNDANCY = 367049635 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + interfaces: MutableSequence['ExternalVpnGatewayInterface'] = proto.RepeatedField( + proto.MESSAGE, + number=12073562, + message='ExternalVpnGatewayInterface', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + redundancy_type: str = proto.Field( + proto.STRING, + number=271443740, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + + +class ExternalVpnGatewayInterface(proto.Message): + r"""The interface for the external VPN gateway. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (int): + The numeric ID of this interface. The allowed input values + for this id for different redundancy types of external VPN + gateway: - SINGLE_IP_INTERNALLY_REDUNDANT - 0 - + TWO_IPS_REDUNDANCY - 0, 1 - FOUR_IPS_REDUNDANCY - 0, 1, 2, 3 + + This field is a member of `oneof`_ ``_id``. + ip_address (str): + IP address of the interface in the external + VPN gateway. Only IPv4 is supported. This IP + address can be either from your on-premise + gateway or another Cloud provider's VPN gateway, + it cannot be an IP address from Google Compute + Engine. + + This field is a member of `oneof`_ ``_ip_address``. 
+ """ + + id: int = proto.Field( + proto.UINT32, + number=3355, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=406272220, + optional=True, + ) + + +class ExternalVpnGatewayList(proto.Message): + r"""Response to the list request, and contains a list of + externalVpnGateways. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.ExternalVpnGateway]): + A list of ExternalVpnGateway resources. + kind (str): + [Output Only] Type of resource. Always + compute#externalVpnGatewayList for lists of + externalVpnGateways. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['ExternalVpnGateway'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='ExternalVpnGateway', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class FileContentBuffer(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + content (str): + The raw content in the secure keys file. + + This field is a member of `oneof`_ ``_content``. + file_type (str): + The file type of source file. + Check the FileType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_file_type``. + """ + class FileType(proto.Enum): + r"""The file type of source file. + + Values: + UNDEFINED_FILE_TYPE (0): + A value indicating that the enum field is not + set. + BIN (65767): + No description available. + UNDEFINED (137851184): + No description available. + X509 (2674086): + No description available. + """ + UNDEFINED_FILE_TYPE = 0 + BIN = 65767 + UNDEFINED = 137851184 + X509 = 2674086 + + content: str = proto.Field( + proto.STRING, + number=414659705, + optional=True, + ) + file_type: str = proto.Field( + proto.STRING, + number=294346781, + optional=True, + ) + + +class Firewall(proto.Message): + r"""Represents a Firewall Rule resource. Firewall rules allow or + deny ingress traffic to, and egress traffic from your instances. + For more information, read Firewall rules. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + allowed (MutableSequence[google.cloud.compute_v1.types.Allowed]): + The list of ALLOW rules specified by this + firewall. Each rule specifies a protocol and + port-range tuple that describes a permitted + connection. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + denied (MutableSequence[google.cloud.compute_v1.types.Denied]): + The list of DENY rules specified by this + firewall. Each rule specifies a protocol and + port-range tuple that describes a denied + connection. + description (str): + An optional description of this resource. + Provide this field when you create the resource. + + This field is a member of `oneof`_ ``_description``. + destination_ranges (MutableSequence[str]): + If destination ranges are specified, the + firewall rule applies only to traffic that has + destination IP address in these ranges. These + ranges must be expressed in CIDR format. Both + IPv4 and IPv6 are supported. + direction (str): + Direction of traffic to which this firewall applies, either + ``INGRESS`` or ``EGRESS``. The default is ``INGRESS``. For + ``EGRESS`` traffic, you cannot specify the sourceTags + fields. Check the Direction enum for the list of possible + values. + + This field is a member of `oneof`_ ``_direction``. + disabled (bool): + Denotes whether the firewall rule is + disabled. When set to true, the firewall rule is + not enforced and the network behaves as if it + did not exist. If this is unspecified, the + firewall rule will be enabled. + + This field is a member of `oneof`_ ``_disabled``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. 
Always compute#firewall + for firewall rules. + + This field is a member of `oneof`_ ``_kind``. + log_config (google.cloud.compute_v1.types.FirewallLogConfig): + This field denotes the logging options for a + particular firewall rule. If logging is enabled, + logs will be exported to Cloud Logging. + + This field is a member of `oneof`_ ``_log_config``. + name (str): + Name of the resource; provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?``. The first character must be + a lowercase letter, and all following characters (except for + the last character) must be a dash, lowercase letter, or + digit. The last character must be a lowercase letter or + digit. + + This field is a member of `oneof`_ ``_name``. + network (str): + URL of the network resource for this firewall + rule. If not specified when creating a firewall + rule, the default network is used: + global/networks/default If you choose to specify + this field, you can specify the network as a + full or partial URL. For example, the following + are all valid URLs: - + https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network + - projects/myproject/global/networks/my-network + - global/networks/default + + This field is a member of `oneof`_ ``_network``. + priority (int): + Priority for this rule. This is an integer between ``0`` and + ``65535``, both inclusive. The default value is ``1000``. + Relative priorities determine which rule takes effect if + multiple rules apply. Lower values indicate higher priority. + For example, a rule with priority ``0`` has higher + precedence than a rule with priority ``1``. DENY rules take + precedence over ALLOW rules if they have equal priority. + Note that VPC networks have implied rules with a priority of + ``65535``. 
To avoid conflicts with the implied rules, use a + priority number less than ``65535``. + + This field is a member of `oneof`_ ``_priority``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + source_ranges (MutableSequence[str]): + If source ranges are specified, the firewall + rule applies only to traffic that has a source + IP address in these ranges. These ranges must be + expressed in CIDR format. One or both of + sourceRanges and sourceTags may be set. If both + fields are set, the rule applies to traffic that + has a source IP address within sourceRanges OR a + source IP from a resource with a matching tag + listed in the sourceTags field. The connection + does not need to match both fields for the rule + to apply. Both IPv4 and IPv6 are supported. + source_service_accounts (MutableSequence[str]): + If source service accounts are specified, the + firewall rules apply only to traffic originating + from an instance with a service account in this + list. Source service accounts cannot be used to + control traffic to an instance's external IP + address because service accounts are associated + with an instance, not an IP address. + sourceRanges can be set at the same time as + sourceServiceAccounts. If both are set, the + firewall applies to traffic that has a source IP + address within the sourceRanges OR a source IP + that belongs to an instance with service account + listed in sourceServiceAccount. The connection + does not need to match both fields for the + firewall to apply. sourceServiceAccounts cannot + be used at the same time as sourceTags or + targetTags. + source_tags (MutableSequence[str]): + If source tags are specified, the firewall + rule applies only to traffic with source IPs + that match the primary network interfaces of VM + instances that have the tag and are in the same + VPC network. 
Source tags cannot be used to + control traffic to an instance's external IP + address, it only applies to traffic between + instances in the same virtual network. Because + tags are associated with instances, not IP + addresses. One or both of sourceRanges and + sourceTags may be set. If both fields are set, + the firewall applies to traffic that has a + source IP address within sourceRanges OR a + source IP from a resource with a matching tag + listed in the sourceTags field. The connection + does not need to match both fields for the + firewall to apply. + target_service_accounts (MutableSequence[str]): + A list of service accounts indicating sets of instances + located in the network that may make network connections as + specified in allowed[]. targetServiceAccounts cannot be used + at the same time as targetTags or sourceTags. If neither + targetServiceAccounts nor targetTags are specified, the + firewall rule applies to all instances on the specified + network. + target_tags (MutableSequence[str]): + A list of tags that controls which instances + the firewall rule applies to. If targetTags are + specified, then the firewall rule applies only + to instances in the VPC network that have one of + those tags. If no targetTags are specified, the + firewall rule applies to all instances on the + specified network. + """ + class Direction(proto.Enum): + r"""Direction of traffic to which this firewall applies, either + ``INGRESS`` or ``EGRESS``. The default is ``INGRESS``. For + ``EGRESS`` traffic, you cannot specify the sourceTags fields. + + Values: + UNDEFINED_DIRECTION (0): + A value indicating that the enum field is not + set. + EGRESS (432880501): + Indicates that firewall should apply to + outgoing traffic. + INGRESS (516931221): + Indicates that firewall should apply to + incoming traffic. 
+ """ + UNDEFINED_DIRECTION = 0 + EGRESS = 432880501 + INGRESS = 516931221 + + allowed: MutableSequence['Allowed'] = proto.RepeatedField( + proto.MESSAGE, + number=162398632, + message='Allowed', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + denied: MutableSequence['Denied'] = proto.RepeatedField( + proto.MESSAGE, + number=275217307, + message='Denied', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + destination_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=305699879, + ) + direction: str = proto.Field( + proto.STRING, + number=111150975, + optional=True, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=270940796, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + log_config: 'FirewallLogConfig' = proto.Field( + proto.MESSAGE, + number=351299741, + optional=True, + message='FirewallLogConfig', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + source_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=200097658, + ) + source_service_accounts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=105100756, + ) + source_tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=452222397, + ) + target_service_accounts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=457639710, + ) + target_tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=62901767, + ) + + +class FirewallList(proto.Message): + 
r"""Contains a list of firewalls. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Firewall]): + A list of Firewall resources. + kind (str): + [Output Only] Type of resource. Always compute#firewallList + for lists of firewalls. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Firewall'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Firewall', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class FirewallLogConfig(proto.Message): + r"""The available logging options for a firewall rule. 
+ + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable (bool): + This field denotes whether to enable logging + for a particular firewall rule. + + This field is a member of `oneof`_ ``_enable``. + metadata (str): + This field can only be specified for a + particular firewall rule if logging is enabled + for that rule. This field denotes whether to + include or exclude metadata for firewall logs. + Check the Metadata enum for the list of possible + values. + + This field is a member of `oneof`_ ``_metadata``. + """ + class Metadata(proto.Enum): + r"""This field can only be specified for a particular firewall + rule if logging is enabled for that rule. This field denotes + whether to include or exclude metadata for firewall logs. + + Values: + UNDEFINED_METADATA (0): + A value indicating that the enum field is not + set. + EXCLUDE_ALL_METADATA (334519954): + No description available. + INCLUDE_ALL_METADATA (164619908): + No description available. + """ + UNDEFINED_METADATA = 0 + EXCLUDE_ALL_METADATA = 334519954 + INCLUDE_ALL_METADATA = 164619908 + + enable: bool = proto.Field( + proto.BOOL, + number=311764355, + optional=True, + ) + metadata: str = proto.Field( + proto.STRING, + number=86866735, + optional=True, + ) + + +class FirewallPoliciesListAssociationsResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + associations (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyAssociation]): + A list of associations. + kind (str): + [Output Only] Type of firewallPolicy associations. Always + compute#FirewallPoliciesListAssociations for lists of + firewallPolicy associations. + + This field is a member of `oneof`_ ``_kind``. 
+ """ + + associations: MutableSequence['FirewallPolicyAssociation'] = proto.RepeatedField( + proto.MESSAGE, + number=508736530, + message='FirewallPolicyAssociation', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + + +class FirewallPolicy(proto.Message): + r"""Represents a Firewall Policy resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + associations (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyAssociation]): + A list of associations that belong to this + firewall policy. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + display_name (str): + Deprecated, please use short name instead. User-provided + name of the Organization firewall policy. The name should be + unique in the organization in which the firewall policy is + created. This field is not applicable to network firewall + policies. This name must be set on creation and cannot be + changed. The name must be 1-63 characters long, and comply + with RFC1035. Specifically, the name must be 1-63 characters + long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_display_name``. + fingerprint (str): + Specifies a fingerprint for this resource, + which is essentially a hash of the metadata's + contents and used for optimistic locking. 
The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update metadata. You must always provide an + up-to-date fingerprint hash in order to update + or change metadata, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make get() request to the + firewall policy. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output only] Type of the resource. Always + compute#firewallPolicyfor firewall policies + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. For Organization Firewall Policies + it's a [Output Only] numeric ID allocated by Google Cloud + which uniquely identifies the Organization Firewall Policy. + + This field is a member of `oneof`_ ``_name``. + parent (str): + [Output Only] The parent of the firewall policy. This field + is not applicable to network firewall policies. + + This field is a member of `oneof`_ ``_parent``. + region (str): + [Output Only] URL of the region where the regional firewall + policy resides. This field is not applicable to global + firewall policies. You must specify this field as part of + the HTTP request URL. It is not settable as a field in the + request body. + + This field is a member of `oneof`_ ``_region``. + rule_tuple_count (int): + [Output Only] Total count of all firewall policy rule + tuples. A firewall policy can not exceed a set number of + tuples. + + This field is a member of `oneof`_ ``_rule_tuple_count``. + rules (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRule]): + A list of rules that belong to this policy. There must + always be a default rule (rule with priority 2147483647 and + match "*"). 
If no rules are provided when creating a + firewall policy, a default rule with action "allow" will be + added. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + [Output Only] Server-defined URL for this resource with the + resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. + short_name (str): + User-provided name of the Organization firewall policy. The + name should be unique in the organization in which the + firewall policy is created. This field is not applicable to + network firewall policies. This name must be set on creation + and cannot be changed. The name must be 1-63 characters + long, and comply with RFC1035. Specifically, the name must + be 1-63 characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_short_name``. 
+ """ + + associations: MutableSequence['FirewallPolicyAssociation'] = proto.RepeatedField( + proto.MESSAGE, + number=508736530, + message='FirewallPolicyAssociation', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + display_name: str = proto.Field( + proto.STRING, + number=4473832, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + parent: str = proto.Field( + proto.STRING, + number=78317738, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + rule_tuple_count: int = proto.Field( + proto.INT32, + number=388342037, + optional=True, + ) + rules: MutableSequence['FirewallPolicyRule'] = proto.RepeatedField( + proto.MESSAGE, + number=108873975, + message='FirewallPolicyRule', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + short_name: str = proto.Field( + proto.STRING, + number=492051566, + optional=True, + ) + + +class FirewallPolicyAssociation(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + attachment_target (str): + The target that the firewall policy is + attached to. + + This field is a member of `oneof`_ ``_attachment_target``. + display_name (str): + [Output Only] Deprecated, please use short name instead. The + display name of the firewall policy of the association. + + This field is a member of `oneof`_ ``_display_name``. 
+ firewall_policy_id (str): + [Output Only] The firewall policy ID of the association. + + This field is a member of `oneof`_ ``_firewall_policy_id``. + name (str): + The name for an association. + + This field is a member of `oneof`_ ``_name``. + short_name (str): + [Output Only] The short name of the firewall policy of the + association. + + This field is a member of `oneof`_ ``_short_name``. + """ + + attachment_target: str = proto.Field( + proto.STRING, + number=175773741, + optional=True, + ) + display_name: str = proto.Field( + proto.STRING, + number=4473832, + optional=True, + ) + firewall_policy_id: str = proto.Field( + proto.STRING, + number=357211849, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + short_name: str = proto.Field( + proto.STRING, + number=492051566, + optional=True, + ) + + +class FirewallPolicyList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.FirewallPolicy]): + A list of FirewallPolicy resources. + kind (str): + [Output Only] Type of resource. Always + compute#firewallPolicyList for lists of FirewallPolicies + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. 
+ + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['FirewallPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='FirewallPolicy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class FirewallPolicyRule(proto.Message): + r"""Represents a rule that describes one or more match conditions + along with the action to be taken when traffic matches this + condition (allow or deny). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + action (str): + The Action to perform when the client connection triggers + the rule. Valid actions are "allow", "deny" and "goto_next". + + This field is a member of `oneof`_ ``_action``. + description (str): + An optional description for this resource. + + This field is a member of `oneof`_ ``_description``. + direction (str): + The direction in which this rule applies. + Check the Direction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_direction``. + disabled (bool): + Denotes whether the firewall policy rule is + disabled. When set to true, the firewall policy + rule is not enforced and traffic behaves as if + it did not exist. If this is unspecified, the + firewall policy rule will be enabled. + + This field is a member of `oneof`_ ``_disabled``. + enable_logging (bool): + Denotes whether to enable logging for a particular rule. If + logging is enabled, logs will be exported to the configured + export destination in Stackdriver. Logs may be exported to + BigQuery or Pub/Sub. 
Note: you cannot enable logging on + "goto_next" rules. + + This field is a member of `oneof`_ ``_enable_logging``. + kind (str): + [Output only] Type of the resource. Always + compute#firewallPolicyRule for firewall policy rules + + This field is a member of `oneof`_ ``_kind``. + match (google.cloud.compute_v1.types.FirewallPolicyRuleMatcher): + A match condition that incoming traffic is + evaluated against. If it evaluates to true, the + corresponding 'action' is enforced. + + This field is a member of `oneof`_ ``_match``. + priority (int): + An integer indicating the priority of a rule + in the list. The priority must be a positive + value between 0 and 2147483647. Rules are + evaluated from highest to lowest priority where + 0 is the highest priority and 2147483647 is the + lowest priority. + + This field is a member of `oneof`_ ``_priority``. + rule_name (str): + An optional name for the rule. This field is + not a unique identifier and can be updated. + + This field is a member of `oneof`_ ``_rule_name``. + rule_tuple_count (int): + [Output Only] Calculation of the complexity of a single + firewall policy rule. + + This field is a member of `oneof`_ ``_rule_tuple_count``. + target_resources (MutableSequence[str]): + A list of network resource URLs to which this + rule applies. This field allows you to control + which network's VMs get this rule. If this field + is left blank, all VMs within the organization + will receive the rule. + target_secure_tags (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRuleSecureTag]): + A list of secure tags that controls which instances the + firewall rule applies to. If targetSecureTag are specified, + then the firewall rule applies only to instances in the VPC + network that have one of those EFFECTIVE secure tags, if all + the target_secure_tag are in INEFFECTIVE state, then this + rule will be ignored. targetSecureTag may not be set at the + same time as targetServiceAccounts. 
If neither + targetServiceAccounts nor targetSecureTag are specified, the + firewall rule applies to all instances on the specified + network. Maximum number of target label tags allowed is 256. + target_service_accounts (MutableSequence[str]): + A list of service accounts indicating the + sets of instances that are applied with this + rule. + """ + class Direction(proto.Enum): + r"""The direction in which this rule applies. + + Values: + UNDEFINED_DIRECTION (0): + A value indicating that the enum field is not + set. + EGRESS (432880501): + No description available. + INGRESS (516931221): + No description available. + """ + UNDEFINED_DIRECTION = 0 + EGRESS = 432880501 + INGRESS = 516931221 + + action: str = proto.Field( + proto.STRING, + number=187661878, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + direction: str = proto.Field( + proto.STRING, + number=111150975, + optional=True, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=270940796, + optional=True, + ) + enable_logging: bool = proto.Field( + proto.BOOL, + number=295396515, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + match: 'FirewallPolicyRuleMatcher' = proto.Field( + proto.MESSAGE, + number=103668165, + optional=True, + message='FirewallPolicyRuleMatcher', + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + rule_name: str = proto.Field( + proto.STRING, + number=55286254, + optional=True, + ) + rule_tuple_count: int = proto.Field( + proto.INT32, + number=388342037, + optional=True, + ) + target_resources: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=528230647, + ) + target_secure_tags: MutableSequence['FirewallPolicyRuleSecureTag'] = proto.RepeatedField( + proto.MESSAGE, + number=468132403, + message='FirewallPolicyRuleSecureTag', + ) + target_service_accounts: MutableSequence[str] = proto.RepeatedField( + 
proto.STRING, + number=457639710, + ) + + +class FirewallPolicyRuleMatcher(proto.Message): + r"""Represents a match condition that incoming traffic is + evaluated against. Exactly one field must be specified. + + Attributes: + dest_address_groups (MutableSequence[str]): + Address groups which should be matched + against the traffic destination. Maximum number + of destination address groups is 10. + dest_fqdns (MutableSequence[str]): + Fully Qualified Domain Name (FQDN) which + should be matched against traffic destination. + Maximum number of destination fqdn allowed is + 100. + dest_ip_ranges (MutableSequence[str]): + CIDR IP address range. Maximum number of + destination CIDR IP ranges allowed is 5000. + dest_region_codes (MutableSequence[str]): + Region codes whose IP addresses will be used + to match for destination of traffic. Should be + specified as 2 letter country code defined as + per ISO 3166 alpha-2 country codes. ex."US" + Maximum number of dest region codes allowed is + 5000. + dest_threat_intelligences (MutableSequence[str]): + Names of Network Threat Intelligence lists. + The IPs in these lists will be matched against + traffic destination. + layer4_configs (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRuleMatcherLayer4Config]): + Pairs of IP protocols and ports that the rule + should match. + src_address_groups (MutableSequence[str]): + Address groups which should be matched + against the traffic source. Maximum number of + source address groups is 10. + src_fqdns (MutableSequence[str]): + Fully Qualified Domain Name (FQDN) which + should be matched against traffic source. + Maximum number of source fqdn allowed is 100. + src_ip_ranges (MutableSequence[str]): + CIDR IP address range. Maximum number of + source CIDR IP ranges allowed is 5000. + src_region_codes (MutableSequence[str]): + Region codes whose IP addresses will be used + to match for source of traffic. 
Should be + specified as 2 letter country code defined as + per ISO 3166 alpha-2 country codes. ex."US" + Maximum number of source region codes allowed is + 5000. + src_secure_tags (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRuleSecureTag]): + List of secure tag values, which should be + matched at the source of the traffic. For + INGRESS rule, if all the srcSecureTag are + INEFFECTIVE, and there is no srcIpRange, this + rule will be ignored. Maximum number of source + tag values allowed is 256. + src_threat_intelligences (MutableSequence[str]): + Names of Network Threat Intelligence lists. + The IPs in these lists will be matched against + traffic source. + """ + + dest_address_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=468760508, + ) + dest_fqdns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=370712737, + ) + dest_ip_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=337357713, + ) + dest_region_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=199120280, + ) + dest_threat_intelligences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=119896492, + ) + layer4_configs: MutableSequence['FirewallPolicyRuleMatcherLayer4Config'] = proto.RepeatedField( + proto.MESSAGE, + number=373534261, + message='FirewallPolicyRuleMatcherLayer4Config', + ) + src_address_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=436423738, + ) + src_fqdns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=435906147, + ) + src_ip_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=432128083, + ) + src_region_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=99086742, + ) + src_secure_tags: MutableSequence['FirewallPolicyRuleSecureTag'] = proto.RepeatedField( + proto.MESSAGE, + number=508791302, + message='FirewallPolicyRuleSecureTag', + ) + 
src_threat_intelligences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=323631018, + ) + + +class FirewallPolicyRuleMatcherLayer4Config(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_protocol (str): + The IP protocol to which this rule applies. + The protocol type is required when creating a + firewall rule. This value can either be one of + the following well known protocol strings (tcp, + udp, icmp, esp, ah, ipip, sctp), or the IP + protocol number. + + This field is a member of `oneof`_ ``_ip_protocol``. + ports (MutableSequence[str]): + An optional list of ports to which this rule applies. This + field is only applicable for UDP or TCP protocol. Each entry + must be either an integer or a range. If not specified, this + rule applies to connections through any port. Example inputs + include: ["22"], ["80","443"], and ["12345-12349"]. + """ + + ip_protocol: str = proto.Field( + proto.STRING, + number=475958960, + optional=True, + ) + ports: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=106854418, + ) + + +class FirewallPolicyRuleSecureTag(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the secure tag, created with + TagManager's TagValue API. + + This field is a member of `oneof`_ ``_name``. + state (str): + [Output Only] State of the secure tag, either ``EFFECTIVE`` + or ``INEFFECTIVE``. A secure tag is ``INEFFECTIVE`` when it + is deleted or its network is deleted. Check the State enum + for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + """ + class State(proto.Enum): + r"""[Output Only] State of the secure tag, either ``EFFECTIVE`` or + ``INEFFECTIVE``. A secure tag is ``INEFFECTIVE`` when it is deleted + or its network is deleted. 
+ + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + EFFECTIVE (244201863): + No description available. + INEFFECTIVE (304458242): + No description available. + """ + UNDEFINED_STATE = 0 + EFFECTIVE = 244201863 + INEFFECTIVE = 304458242 + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class FixedOrPercent(proto.Message): + r"""Encapsulates numeric value that can be either absolute or + relative. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + calculated (int): + [Output Only] Absolute value of VM instances calculated + based on the specific mode. - If the value is fixed, then + the calculated value is equal to the fixed value. - If the + value is a percent, then the calculated value is percent/100 + \* targetSize. For example, the calculated value of a 80% of + a managed instance group with 150 instances would be (80/100 + \* 150) = 120 VM instances. If there is a remainder, the + number is rounded. + + This field is a member of `oneof`_ ``_calculated``. + fixed (int): + Specifies a fixed number of VM instances. + This must be a positive integer. + + This field is a member of `oneof`_ ``_fixed``. + percent (int): + Specifies a percentage of instances between 0 + to 100%, inclusive. For example, specify 80 for + 80%. + + This field is a member of `oneof`_ ``_percent``. + """ + + calculated: int = proto.Field( + proto.INT32, + number=472082878, + optional=True, + ) + fixed: int = proto.Field( + proto.INT32, + number=97445748, + optional=True, + ) + percent: int = proto.Field( + proto.INT32, + number=394814533, + optional=True, + ) + + +class ForwardingRule(proto.Message): + r"""Represents a Forwarding Rule resource. 
Forwarding rule resources in + Google Cloud can be either regional or global in scope: \* + `Global `__ + \* + `Regional `__ + A forwarding rule and its corresponding IP address represent the + frontend configuration of a Google Cloud Platform load balancer. + Forwarding rules can also reference target instances and Cloud VPN + Classic gateways (targetVpnGateway). For more information, read + Forwarding rule concepts and Using protocol forwarding. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + I_p_address (str): + IP address for which this forwarding rule accepts traffic. + When a client sends traffic to this IP address, the + forwarding rule directs the traffic to the referenced target + or backendService. While creating a forwarding rule, + specifying an IPAddress is required under the following + circumstances: - When the target is set to targetGrpcProxy + and validateForProxyless is set to true, the IPAddress + should be set to 0.0.0.0. - When the target is a Private + Service Connect Google APIs bundle, you must specify an + IPAddress. Otherwise, you can optionally specify an IP + address that references an existing static (reserved) IP + address resource. When omitted, Google Cloud assigns an + ephemeral IP address. Use one of the following formats to + specify an IP address while creating a forwarding rule: \* + IP address number, as in ``100.1.2.3`` \* IPv6 address + range, as in ``2600:1234::/96`` \* Full resource URL, as in + https://www.googleapis.com/compute/v1/projects/ + project_id/regions/region/addresses/address-name \* Partial + URL or by name, as in: - + projects/project_id/regions/region/addresses/address-name - + regions/region/addresses/address-name - + global/addresses/address-name - address-name The forwarding + rule's target or backendService, and in most cases, also the + loadBalancingScheme, determine the type of IP address that + you can use. 
For detailed information, see `IP address + specifications `__. + When reading an IPAddress, the API always returns the IP + address number. + + This field is a member of `oneof`_ ``_I_p_address``. + I_p_protocol (str): + The IP protocol to which this rule applies. For protocol + forwarding, valid options are TCP, UDP, ESP, AH, SCTP, ICMP + and L3_DEFAULT. The valid IP protocols are different for + different load balancing products as described in `Load + balancing + features `__. + Check the IPProtocolEnum enum for the list of possible + values. + + This field is a member of `oneof`_ ``_I_p_protocol``. + all_ports (bool): + This field can only be used: - If IPProtocol is one of TCP, + UDP, or SCTP. - By internal TCP/UDP load balancers, backend + service-based network load balancers, and internal and + external protocol forwarding. Set this field to true to + allow packets addressed to any port or packets lacking + destination port information (for example, UDP fragments + after the first fragment) to be forwarded to the backends + configured with this forwarding rule. The ports, port_range, + and allPorts fields are mutually exclusive. + + This field is a member of `oneof`_ ``_all_ports``. + allow_global_access (bool): + This field is used along with the backend_service field for + internal load balancing or with the target field for + internal TargetInstance. If set to true, clients can access + the Internal TCP/UDP Load Balancer, Internal HTTP(S) and TCP + Proxy Load Balancer from all regions. If false, only allows + access from the local region the load balancer is located + at. Note that for INTERNAL_MANAGED forwarding rules, this + field cannot be changed after the forwarding rule is + created. + + This field is a member of `oneof`_ ``_allow_global_access``. + allow_psc_global_access (bool): + This is used in PSC consumer ForwardingRule + to control whether the PSC endpoint can be + accessed from another region. 
+ + This field is a member of `oneof`_ ``_allow_psc_global_access``. + backend_service (str): + Identifies the backend service to which the + forwarding rule sends traffic. Required for + Internal TCP/UDP Load Balancing and Network Load + Balancing; must be omitted for all other load + balancer types. + + This field is a member of `oneof`_ ``_backend_service``. + base_forwarding_rule (str): + [Output Only] The URL for the corresponding base Forwarding + Rule. By base Forwarding Rule, we mean the Forwarding Rule + that has the same IP address, protocol, and port settings + with the current Forwarding Rule, but without sourceIPRanges + specified. Always empty if the current Forwarding Rule does + not have sourceIPRanges specified. + + This field is a member of `oneof`_ ``_base_forwarding_rule``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a ForwardingRule. Include + the fingerprint in patch request to ensure that + you do not overwrite changes that were applied + from another concurrent request. To see the + latest fingerprint, make a get() request to + retrieve a ForwardingRule. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + ip_version (str): + The IP Version that will be used by this + forwarding rule. Valid options are IPV4 or IPV6. + Check the IpVersion enum for the list of + possible values. 
+ + This field is a member of `oneof`_ ``_ip_version``. + is_mirroring_collector (bool): + Indicates whether or not this load balancer + can be used as a collector for packet mirroring. + To prevent mirroring loops, instances behind + this load balancer will not have their traffic + mirrored even if a PacketMirroring rule applies + to them. This can only be set to true for load + balancers that have their loadBalancingScheme + set to INTERNAL. + + This field is a member of `oneof`_ ``_is_mirroring_collector``. + kind (str): + [Output Only] Type of the resource. Always + compute#forwardingRule for Forwarding Rule resources. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this resource, which is essentially a hash of + the labels set used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash in order to update + or change labels, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make a get() request to + retrieve a ForwardingRule. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + load_balancing_scheme (str): + Specifies the forwarding rule type. For more + information about forwarding rules, refer to + Forwarding rule concepts. Check the + LoadBalancingScheme enum for the list of + possible values. + + This field is a member of `oneof`_ ``_load_balancing_scheme``. + metadata_filters (MutableSequence[google.cloud.compute_v1.types.MetadataFilter]): + Opaque filter criteria used by load balancer to restrict + routing configuration to a limited set of xDS compliant + clients. 
In their xDS requests to load balancer, xDS clients + present node metadata. When there is a match, the relevant + configuration is made available to those proxies. Otherwise, + all the resources (e.g. TargetHttpProxy, UrlMap) referenced + by the ForwardingRule are not visible to those proxies. For + each metadataFilter in this list, if its filterMatchCriteria + is set to MATCH_ANY, at least one of the filterLabels must + match the corresponding label provided in the metadata. If + its filterMatchCriteria is set to MATCH_ALL, then all of its + filterLabels must match with corresponding labels provided + in the metadata. If multiple metadataFilters are specified, + all of them need to be satisfied in order to be considered a + match. metadataFilters specified here will be applied + before those specified in the UrlMap that this + ForwardingRule references. metadataFilters only applies to + Loadbalancers that have their loadBalancingScheme set to + INTERNAL_SELF_MANAGED. + name (str): + Name of the resource; provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. For + Private Service Connect forwarding rules that forward + traffic to Google APIs, the forwarding rule name must be a + 1-20 characters string with lowercase letters and numbers + and must start with a letter. + + This field is a member of `oneof`_ ``_name``. + network (str): + This field is not used for external load + balancing. For Internal TCP/UDP Load Balancing, + this field identifies the network that the load + balanced IP should belong to for this Forwarding + Rule. 
If the subnetwork is specified, the + network of the subnetwork will be used. If + neither subnetwork nor this field is specified, + the default network will be used. For Private + Service Connect forwarding rules that forward + traffic to Google APIs, a network must be + provided. + + This field is a member of `oneof`_ ``_network``. + network_tier (str): + This signifies the networking tier used for + configuring this load balancer and can only take + the following values: PREMIUM, STANDARD. For + regional ForwardingRule, the valid values are + PREMIUM and STANDARD. For GlobalForwardingRule, + the valid value is PREMIUM. If this field is not + specified, it is assumed to be PREMIUM. If + IPAddress is specified, this value must be equal + to the networkTier of the Address. Check the + NetworkTier enum for the list of possible + values. + + This field is a member of `oneof`_ ``_network_tier``. + no_automate_dns_zone (bool): + This is used in PSC consumer ForwardingRule + to control whether it should try to + auto-generate a DNS zone or not. Non-PSC + forwarding rules do not use this field. + + This field is a member of `oneof`_ ``_no_automate_dns_zone``. + port_range (str): + This field can only be used: - If IPProtocol is one of TCP, + UDP, or SCTP. - By backend service-based network load + balancers, target pool-based network load balancers, + internal proxy load balancers, external proxy load + balancers, Traffic Director, external protocol forwarding, + and Classic VPN. Some products have restrictions on what + ports can be used. See port specifications for details. Only + packets addressed to ports in the specified range will be + forwarded to the backends configured with this forwarding + rule. The ports, port_range, and allPorts fields are + mutually exclusive. For external forwarding rules, two or + more forwarding rules cannot use the same [IPAddress, + IPProtocol] pair, and cannot have overlapping portRanges. 
+ For internal forwarding rules within the same VPC network, + two or more forwarding rules cannot use the same [IPAddress, + IPProtocol] pair, and cannot have overlapping portRanges. + @pattern: \\d+(?:-\d+)? + + This field is a member of `oneof`_ ``_port_range``. + ports (MutableSequence[str]): + This field can only be used: - If IPProtocol is one of TCP, + UDP, or SCTP. - By internal TCP/UDP load balancers, backend + service-based network load balancers, and internal protocol + forwarding. You can specify a list of up to five ports by + number, separated by commas. The ports can be contiguous or + discontiguous. Only packets addressed to these ports will be + forwarded to the backends configured with this forwarding + rule. For external forwarding rules, two or more forwarding + rules cannot use the same [IPAddress, IPProtocol] pair, and + cannot share any values defined in ports. For internal + forwarding rules within the same VPC network, two or more + forwarding rules cannot use the same [IPAddress, IPProtocol] + pair, and cannot share any values defined in ports. The + ports, port_range, and allPorts fields are mutually + exclusive. @pattern: \\d+(?:-\d+)? + psc_connection_id (int): + [Output Only] The PSC connection id of the PSC Forwarding + Rule. + + This field is a member of `oneof`_ ``_psc_connection_id``. + psc_connection_status (str): + Check the PscConnectionStatus enum for the + list of possible values. + + This field is a member of `oneof`_ ``_psc_connection_status``. + region (str): + [Output Only] URL of the region where the regional + forwarding rule resides. This field is not applicable to + global forwarding rules. You must specify this field as part + of the HTTP request URL. It is not settable as a field in + the request body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ service_directory_registrations (MutableSequence[google.cloud.compute_v1.types.ForwardingRuleServiceDirectoryRegistration]): + Service Directory resources to register this + forwarding rule with. Currently, only supports a + single Service Directory resource. + service_label (str): + An optional prefix to the service name for this Forwarding + Rule. If specified, the prefix is the first label of the + fully qualified service name. The label must be 1-63 + characters long, and comply with RFC1035. Specifically, the + label must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + This field is only used for internal load balancing. + + This field is a member of `oneof`_ ``_service_label``. + service_name (str): + [Output Only] The internal fully qualified service name for + this Forwarding Rule. This field is only used for internal + load balancing. + + This field is a member of `oneof`_ ``_service_name``. + source_ip_ranges (MutableSequence[str]): + If not empty, this Forwarding Rule will only forward the + traffic when the source IP address matches one of the IP + addresses or CIDR ranges set here. Note that a Forwarding + Rule can only have up to 64 source IP ranges, and this field + can only be used with a regional Forwarding Rule whose + scheme is EXTERNAL. Each source_ip_range entry should be + either an IP address (for example, 1.2.3.4) or a CIDR range + (for example, 1.2.3.0/24). + subnetwork (str): + This field identifies the subnetwork that the + load balanced IP should belong to for this + Forwarding Rule, used in internal load balancing + and network load balancing with IPv6. If the + network specified is in auto subnet mode, this + field is optional. 
However, a subnetwork must be + specified if the network is in custom subnet + mode or when creating external forwarding rule + with IPv6. + + This field is a member of `oneof`_ ``_subnetwork``. + target (str): + The URL of the target resource to receive the matched + traffic. For regional forwarding rules, this target must be + in the same region as the forwarding rule. For global + forwarding rules, this target must be a global load + balancing resource. The forwarded traffic must be of a type + appropriate to the target object. - For load balancers, see + the "Target" column in `Port + specifications `__. + - For Private Service Connect forwarding rules that forward + traffic to Google APIs, provide the name of a supported + Google API bundle: - vpc-sc - APIs that support VPC Service + Controls. - all-apis - All supported Google APIs. - For + Private Service Connect forwarding rules that forward + traffic to managed services, the target must be a service + attachment. + + This field is a member of `oneof`_ ``_target``. + """ + class IPProtocolEnum(proto.Enum): + r"""The IP protocol to which this rule applies. For protocol forwarding, + valid options are TCP, UDP, ESP, AH, SCTP, ICMP and L3_DEFAULT. The + valid IP protocols are different for different load balancing + products as described in `Load balancing + features `__. + + Values: + UNDEFINED_I_P_PROTOCOL_ENUM (0): + A value indicating that the enum field is not + set. + AH (2087): + No description available. + ESP (68962): + No description available. + ICMP (2241597): + No description available. + L3_DEFAULT (48151369): + No description available. + SCTP (2539724): + No description available. + TCP (82881): + No description available. + UDP (83873): + No description available. 
+ """ + UNDEFINED_I_P_PROTOCOL_ENUM = 0 + AH = 2087 + ESP = 68962 + ICMP = 2241597 + L3_DEFAULT = 48151369 + SCTP = 2539724 + TCP = 82881 + UDP = 83873 + + class IpVersion(proto.Enum): + r"""The IP Version that will be used by this forwarding rule. + Valid options are IPV4 or IPV6. + + Values: + UNDEFINED_IP_VERSION (0): + A value indicating that the enum field is not + set. + IPV4 (2254341): + No description available. + IPV6 (2254343): + No description available. + UNSPECIFIED_VERSION (21850000): + No description available. + """ + UNDEFINED_IP_VERSION = 0 + IPV4 = 2254341 + IPV6 = 2254343 + UNSPECIFIED_VERSION = 21850000 + + class LoadBalancingScheme(proto.Enum): + r"""Specifies the forwarding rule type. For more information + about forwarding rules, refer to Forwarding rule concepts. + + Values: + UNDEFINED_LOAD_BALANCING_SCHEME (0): + A value indicating that the enum field is not + set. + EXTERNAL (35607499): + No description available. + EXTERNAL_MANAGED (512006923): + No description available. + INTERNAL (279295677): + No description available. + INTERNAL_MANAGED (37350397): + No description available. + INTERNAL_SELF_MANAGED (236211150): + No description available. + INVALID (530283991): + No description available. + """ + UNDEFINED_LOAD_BALANCING_SCHEME = 0 + EXTERNAL = 35607499 + EXTERNAL_MANAGED = 512006923 + INTERNAL = 279295677 + INTERNAL_MANAGED = 37350397 + INTERNAL_SELF_MANAGED = 236211150 + INVALID = 530283991 + + class NetworkTier(proto.Enum): + r"""This signifies the networking tier used for configuring this + load balancer and can only take the following values: PREMIUM, + STANDARD. For regional ForwardingRule, the valid values are + PREMIUM and STANDARD. For GlobalForwardingRule, the valid value + is PREMIUM. If this field is not specified, it is assumed to be + PREMIUM. If IPAddress is specified, this value must be equal to + the networkTier of the Address. 
+ + Values: + UNDEFINED_NETWORK_TIER (0): + A value indicating that the enum field is not + set. + FIXED_STANDARD (310464328): + Public internet quality with fixed bandwidth. + PREMIUM (399530551): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (484642493): + Public internet quality, only limited support + for other networking products. + STANDARD_OVERRIDES_FIXED_STANDARD (465847234): + (Output only) Temporary tier for FIXED_STANDARD when fixed + standard tier is expired or not configured. + """ + UNDEFINED_NETWORK_TIER = 0 + FIXED_STANDARD = 310464328 + PREMIUM = 399530551 + STANDARD = 484642493 + STANDARD_OVERRIDES_FIXED_STANDARD = 465847234 + + class PscConnectionStatus(proto.Enum): + r""" + + Values: + UNDEFINED_PSC_CONNECTION_STATUS (0): + A value indicating that the enum field is not + set. + ACCEPTED (246714279): + The connection has been accepted by the + producer. + CLOSED (380163436): + The connection has been closed by the + producer and will not serve traffic going + forward. + NEEDS_ATTENTION (344491452): + The connection has been accepted by the + producer, but the producer needs to take further + action before the forwarding rule can serve + traffic. + PENDING (35394935): + The connection is pending acceptance by the + producer. + REJECTED (174130302): + The connection has been rejected by the + producer. + STATUS_UNSPECIFIED (42133066): + No description available. 
+ """ + UNDEFINED_PSC_CONNECTION_STATUS = 0 + ACCEPTED = 246714279 + CLOSED = 380163436 + NEEDS_ATTENTION = 344491452 + PENDING = 35394935 + REJECTED = 174130302 + STATUS_UNSPECIFIED = 42133066 + + I_p_address: str = proto.Field( + proto.STRING, + number=42976943, + optional=True, + ) + I_p_protocol: str = proto.Field( + proto.STRING, + number=488094525, + optional=True, + ) + all_ports: bool = proto.Field( + proto.BOOL, + number=445175796, + optional=True, + ) + allow_global_access: bool = proto.Field( + proto.BOOL, + number=499409674, + optional=True, + ) + allow_psc_global_access: bool = proto.Field( + proto.BOOL, + number=263471819, + optional=True, + ) + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + optional=True, + ) + base_forwarding_rule: str = proto.Field( + proto.STRING, + number=524873104, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + ip_version: str = proto.Field( + proto.STRING, + number=294959552, + optional=True, + ) + is_mirroring_collector: bool = proto.Field( + proto.BOOL, + number=119255164, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + load_balancing_scheme: str = proto.Field( + proto.STRING, + number=363890244, + optional=True, + ) + metadata_filters: MutableSequence['MetadataFilter'] = proto.RepeatedField( + proto.MESSAGE, + number=464725739, + message='MetadataFilter', + ) + name: str = proto.Field( + proto.STRING, + 
number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + network_tier: str = proto.Field( + proto.STRING, + number=517397843, + optional=True, + ) + no_automate_dns_zone: bool = proto.Field( + proto.BOOL, + number=64546991, + optional=True, + ) + port_range: str = proto.Field( + proto.STRING, + number=217518079, + optional=True, + ) + ports: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=106854418, + ) + psc_connection_id: int = proto.Field( + proto.UINT64, + number=292082397, + optional=True, + ) + psc_connection_status: str = proto.Field( + proto.STRING, + number=184149172, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + service_directory_registrations: MutableSequence['ForwardingRuleServiceDirectoryRegistration'] = proto.RepeatedField( + proto.MESSAGE, + number=223549694, + message='ForwardingRuleServiceDirectoryRegistration', + ) + service_label: str = proto.Field( + proto.STRING, + number=417008874, + optional=True, + ) + service_name: str = proto.Field( + proto.STRING, + number=359880149, + optional=True, + ) + source_ip_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=111563210, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + optional=True, + ) + target: str = proto.Field( + proto.STRING, + number=192835985, + optional=True, + ) + + +class ForwardingRuleAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. 
+ items (MutableMapping[str, google.cloud.compute_v1.types.ForwardingRulesScopedList]): + A list of ForwardingRulesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#forwardingRuleAggregatedList for lists of forwarding + rules. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'ForwardingRulesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='ForwardingRulesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ForwardingRuleList(proto.Message): + r"""Contains a list of ForwardingRule resources. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.ForwardingRule]): + A list of ForwardingRule resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['ForwardingRule'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='ForwardingRule', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ForwardingRuleReference(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule (str): + + This field is a member of `oneof`_ ``_forwarding_rule``. + """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + optional=True, + ) + + +class ForwardingRuleServiceDirectoryRegistration(proto.Message): + r"""Describes the auto-registration of the Forwarding Rule to + Service Directory. The region and project of the Service + Directory resource generated from this registration will be the + same as this Forwarding Rule. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + namespace (str): + Service Directory namespace to register the + forwarding rule under. + + This field is a member of `oneof`_ ``_namespace``. + service (str): + Service Directory service to register the + forwarding rule under. + + This field is a member of `oneof`_ ``_service``. + service_directory_region (str): + [Optional] Service Directory region to register this global + forwarding rule under. Default to "us-central1". Only used + for PSC for Google APIs. All PSC for Google APIs Forwarding + Rules on the same network should use the same Service + Directory region. + + This field is a member of `oneof`_ ``_service_directory_region``. + """ + + namespace: str = proto.Field( + proto.STRING, + number=178476379, + optional=True, + ) + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + service_directory_region: str = proto.Field( + proto.STRING, + number=74030416, + optional=True, + ) + + +class ForwardingRulesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rules (MutableSequence[google.cloud.compute_v1.types.ForwardingRule]): + A list of forwarding rules contained in this + scope. 
+ warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of forwarding rules when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + forwarding_rules: MutableSequence['ForwardingRule'] = proto.RepeatedField( + proto.MESSAGE, + number=315821365, + message='ForwardingRule', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class GRPCHealthCheck(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + grpc_service_name (str): + The gRPC service name for the health check. This field is + optional. The value of grpc_service_name has the following + meanings by convention: - Empty service_name means the + overall status of all services at the backend. - Non-empty + service_name means the health of that gRPC service, as + defined by the owner of the service. The grpc_service_name + can only be ASCII. + + This field is a member of `oneof`_ ``_grpc_service_name``. + port (int): + The TCP port number to which the health check + prober sends packets. Valid values are 1 through + 65535. + + This field is a member of `oneof`_ ``_port``. + port_name (str): + Not supported. + + This field is a member of `oneof`_ ``_port_name``. + port_specification (str): + Specifies how a port is selected for health checking. Can be + one of the following values: USE_FIXED_PORT: Specifies a + port number explicitly using the port field in the health + check. Supported by backend services for pass-through load + balancers and backend services for proxy load balancers. Not + supported by target pools. The health check supports all + backends supported by the backend service provided the + backend can be health checked. For example, GCE_VM_IP + network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. 
USE_NAMED_PORT: Not + supported. USE_SERVING_PORT: Provides an indirect method of + specifying the health check port by referring to the backend + service. Only supported by backend services for proxy load + balancers. Not supported by target pools. Not supported by + backend services for pass-through load balancers. Supports + all backends that can be health checked; for example, + GCE_VM_IP_PORT network endpoint groups and instance group + backends. For GCE_VM_IP_PORT network endpoint group + backends, the health check uses the port number specified + for each endpoint in the network endpoint group. For + instance group backends, the health check uses the port + number determined by looking up the backend service's named + port in the instance group's list of named ports. Check the + PortSpecification enum for the list of possible values. + + This field is a member of `oneof`_ ``_port_specification``. + """ + class PortSpecification(proto.Enum): + r"""Specifies how a port is selected for health checking. Can be one of + the following values: USE_FIXED_PORT: Specifies a port number + explicitly using the port field in the health check. Supported by + backend services for pass-through load balancers and backend + services for proxy load balancers. Not supported by target pools. + The health check supports all backends supported by the backend + service provided the backend can be health checked. For example, + GCE_VM_IP network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not supported. + USE_SERVING_PORT: Provides an indirect method of specifying the + health check port by referring to the backend service. Only + supported by backend services for proxy load balancers. Not + supported by target pools. Not supported by backend services for + pass-through load balancers. Supports all backends that can be + health checked; for example, GCE_VM_IP_PORT network endpoint groups + and instance group backends. 
For GCE_VM_IP_PORT network endpoint + group backends, the health check uses the port number specified for + each endpoint in the network endpoint group. For instance group + backends, the health check uses the port number determined by + looking up the backend service's named port in the instance group's + list of named ports. + + Values: + UNDEFINED_PORT_SPECIFICATION (0): + A value indicating that the enum field is not + set. + USE_FIXED_PORT (190235748): + The port number in the health check's port is + used for health checking. Applies to network + endpoint group and instance group backends. + USE_NAMED_PORT (349300671): + Not supported. + USE_SERVING_PORT (362637516): + For network endpoint group backends, the + health check uses the port number specified on + each endpoint in the network endpoint group. For + instance group backends, the health check uses + the port number specified for the backend + service's named port defined in the instance + group's named ports. + """ + UNDEFINED_PORT_SPECIFICATION = 0 + USE_FIXED_PORT = 190235748 + USE_NAMED_PORT = 349300671 + USE_SERVING_PORT = 362637516 + + grpc_service_name: str = proto.Field( + proto.STRING, + number=136533078, + optional=True, + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + port_name: str = proto.Field( + proto.STRING, + number=41534345, + optional=True, + ) + port_specification: str = proto.Field( + proto.STRING, + number=51590597, + optional=True, + ) + + +class GetAcceleratorTypeRequest(proto.Message): + r"""A request message for AcceleratorTypes.Get. See the method + description for details. + + Attributes: + accelerator_type (str): + Name of the accelerator type to return. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. 
+ """ + + accelerator_type: str = proto.Field( + proto.STRING, + number=138031246, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetAddressRequest(proto.Message): + r"""A request message for Addresses.Get. See the method + description for details. + + Attributes: + address (str): + Name of the address resource to return. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + address: str = proto.Field( + proto.STRING, + number=462920692, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetAssociationFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.GetAssociation. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to which the + queried rule belongs. + name (str): + The name of the association to get from the + firewall policy. + + This field is a member of `oneof`_ ``_name``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + +class GetAssociationNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.GetAssociation. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to which the + queried association belongs. + name (str): + The name of the association to get from the + firewall policy. + + This field is a member of `oneof`_ ``_name``. 
+ project (str): + Project ID for this request. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetAssociationRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.GetAssociation. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to which the + queried association belongs. + name (str): + The name of the association to get from the + firewall policy. + + This field is a member of `oneof`_ ``_name``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetAutoscalerRequest(proto.Message): + r"""A request message for Autoscalers.Get. See the method + description for details. + + Attributes: + autoscaler (str): + Name of the autoscaler to return. + project (str): + Project ID for this request. + zone (str): + Name of the zone for this request. + """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.Get. See the method + description for details. 
+ + Attributes: + backend_bucket (str): + Name of the BackendBucket resource to return. + project (str): + Project ID for this request. + """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.Get. See the method + description for details. + + Attributes: + backend_service (str): + Name of the BackendService resource to + return. + project (str): + Project ID for this request. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetDiagnosticsInterconnectRequest(proto.Message): + r"""A request message for Interconnects.GetDiagnostics. See the + method description for details. + + Attributes: + interconnect (str): + Name of the interconnect resource to query. + project (str): + Project ID for this request. + """ + + interconnect: str = proto.Field( + proto.STRING, + number=224601230, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetDiskRequest(proto.Message): + r"""A request message for Disks.Get. See the method description + for details. + + Attributes: + disk (str): + Name of the persistent disk to return. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetDiskTypeRequest(proto.Message): + r"""A request message for DiskTypes.Get. See the method + description for details. + + Attributes: + disk_type (str): + Name of the disk type to return. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. 
+ """ + + disk_type: str = proto.Field( + proto.STRING, + number=93009052, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetEffectiveFirewallsInstanceRequest(proto.Message): + r"""A request message for Instances.GetEffectiveFirewalls. See + the method description for details. + + Attributes: + instance (str): + Name of the instance scoping this request. + network_interface (str): + The name of the network interface to get the + effective firewalls. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + network_interface: str = proto.Field( + proto.STRING, + number=365387880, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetEffectiveFirewallsNetworkRequest(proto.Message): + r"""A request message for Networks.GetEffectiveFirewalls. See the + method description for details. + + Attributes: + network (str): + Name of the network for this request. + project (str): + Project ID for this request. + """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.GetEffectiveFirewalls. See the + method description for details. + + Attributes: + network (str): + Network reference + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetExternalVpnGatewayRequest(proto.Message): + r"""A request message for ExternalVpnGateways.Get. See the method + description for details. + + Attributes: + external_vpn_gateway (str): + Name of the externalVpnGateway to return. + project (str): + Project ID for this request. + """ + + external_vpn_gateway: str = proto.Field( + proto.STRING, + number=109898629, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.Get. See the method + description for details. + + Attributes: + firewall_policy (str): + Name of the firewall policy to get. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + + +class GetFirewallRequest(proto.Message): + r"""A request message for Firewalls.Get. See the method + description for details. + + Attributes: + firewall (str): + Name of the firewall rule to return. + project (str): + Project ID for this request. + """ + + firewall: str = proto.Field( + proto.STRING, + number=511016192, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetForwardingRuleRequest(proto.Message): + r"""A request message for ForwardingRules.Get. See the method + description for details. + + Attributes: + forwarding_rule (str): + Name of the ForwardingRule resource to + return. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetFromFamilyImageRequest(proto.Message): + r"""A request message for Images.GetFromFamily. See the method + description for details. + + Attributes: + family (str): + Name of the image family to search for. + project (str): + The image project that the image belongs to. + For example, to get a CentOS image, specify + centos-cloud as the image project. + """ + + family: str = proto.Field( + proto.STRING, + number=328751972, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetGlobalAddressRequest(proto.Message): + r"""A request message for GlobalAddresses.Get. See the method + description for details. + + Attributes: + address (str): + Name of the address resource to return. + project (str): + Project ID for this request. + """ + + address: str = proto.Field( + proto.STRING, + number=462920692, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetGlobalForwardingRuleRequest(proto.Message): + r"""A request message for GlobalForwardingRules.Get. See the + method description for details. + + Attributes: + forwarding_rule (str): + Name of the ForwardingRule resource to + return. + project (str): + Project ID for this request. + """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetGlobalNetworkEndpointGroupRequest(proto.Message): + r"""A request message for GlobalNetworkEndpointGroups.Get. See + the method description for details. + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group. It + should comply with RFC1035. + project (str): + Project ID for this request. 
+ """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetGlobalOperationRequest(proto.Message): + r"""A request message for GlobalOperations.Get. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to return. + project (str): + Project ID for this request. + """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetGlobalOrganizationOperationRequest(proto.Message): + r"""A request message for GlobalOrganizationOperations.Get. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + operation (str): + Name of the Operations resource to return. + parent_id (str): + Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. + """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + parent_id: str = proto.Field( + proto.STRING, + number=459714768, + optional=True, + ) + + +class GetGlobalPublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for GlobalPublicDelegatedPrefixes.Get. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix: str = proto.Field( + proto.STRING, + number=204238440, + ) + + +class GetGuestAttributesInstanceRequest(proto.Message): + r"""A request message for Instances.GetGuestAttributes. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + project (str): + Project ID for this request. + query_path (str): + Specifies the guest attributes path to be + queried. + + This field is a member of `oneof`_ ``_query_path``. + variable_key (str): + Specifies the key for the guest attributes + entry. + + This field is a member of `oneof`_ ``_variable_key``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + query_path: str = proto.Field( + proto.STRING, + number=368591164, + optional=True, + ) + variable_key: str = proto.Field( + proto.STRING, + number=164364828, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetHealthBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.GetHealth. See the + method description for details. + + Attributes: + backend_service (str): + Name of the BackendService resource to which + the queried instance belongs. + project (str): + + resource_group_reference_resource (google.cloud.compute_v1.types.ResourceGroupReference): + The body resource for this request + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource_group_reference_resource: 'ResourceGroupReference' = proto.Field( + proto.MESSAGE, + number=112951123, + message='ResourceGroupReference', + ) + + +class GetHealthCheckRequest(proto.Message): + r"""A request message for HealthChecks.Get. See the method + description for details. + + Attributes: + health_check (str): + Name of the HealthCheck resource to return. + project (str): + Project ID for this request. 
+ """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetHealthRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.GetHealth. See + the method description for details. + + Attributes: + backend_service (str): + Name of the BackendService resource for which + to get health. + project (str): + + region (str): + Name of the region scoping this request. + resource_group_reference_resource (google.cloud.compute_v1.types.ResourceGroupReference): + The body resource for this request + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource_group_reference_resource: 'ResourceGroupReference' = proto.Field( + proto.MESSAGE, + number=112951123, + message='ResourceGroupReference', + ) + + +class GetHealthTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.GetHealth. See the method + description for details. + + Attributes: + instance_reference_resource (google.cloud.compute_v1.types.InstanceReference): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + target_pool (str): + Name of the TargetPool resource to which the + queried instance belongs. + """ + + instance_reference_resource: 'InstanceReference' = proto.Field( + proto.MESSAGE, + number=292926060, + message='InstanceReference', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + + +class GetIamPolicyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.GetIamPolicy. 
See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyDiskRequest(proto.Message): + r"""A request message for Disks.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. 
+ + This field is a member of `oneof`_ ``_options_requested_policy_version``. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyImageRequest(proto.Message): + r"""A request message for Images.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyInstanceRequest(proto.Message): + r"""A request message for Instances.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyInstanceTemplateRequest(proto.Message): + r"""A request message for InstanceTemplates.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyLicenseRequest(proto.Message): + r"""A request message for Licenses.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyMachineImageRequest(proto.Message): + r"""A request message for MachineImages.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.GetIamPolicy. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.GetIamPolicy. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyNodeTemplateRequest(proto.Message): + r"""A request message for NodeTemplates.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.GetIamPolicy. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyReservationRequest(proto.Message): + r"""A request message for Reservations.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.GetIamPolicy. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicySnapshotRequest(proto.Message): + r"""A request message for Snapshots.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicySubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetImageFamilyViewRequest(proto.Message): + r"""A request message for ImageFamilyViews.Get. See the method + description for details. + + Attributes: + family (str): + Name of the image family to search for. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. + """ + + family: str = proto.Field( + proto.STRING, + number=328751972, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetImageRequest(proto.Message): + r"""A request message for Images.Get. See the method description + for details. + + Attributes: + image (str): + Name of the image resource to return. + project (str): + Project ID for this request. 
+ """ + + image: str = proto.Field( + proto.STRING, + number=100313435, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.Get. See the + method description for details. + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + project (str): + Project ID for this request. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetInstanceGroupRequest(proto.Message): + r"""A request message for InstanceGroups.Get. See the method + description for details. + + Attributes: + instance_group (str): + The name of the instance group. + project (str): + Project ID for this request. + zone (str): + The name of the zone where the instance group + is located. + """ + + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetInstanceRequest(proto.Message): + r"""A request message for Instances.Get. See the method + description for details. + + Attributes: + instance (str): + Name of the instance resource to return. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetInstanceTemplateRequest(proto.Message): + r"""A request message for InstanceTemplates.Get. See the method + description for details. 
+ + Attributes: + instance_template (str): + The name of the instance template. + project (str): + Project ID for this request. + """ + + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetInterconnectAttachmentRequest(proto.Message): + r"""A request message for InterconnectAttachments.Get. See the + method description for details. + + Attributes: + interconnect_attachment (str): + Name of the interconnect attachment to + return. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + interconnect_attachment: str = proto.Field( + proto.STRING, + number=308135284, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetInterconnectLocationRequest(proto.Message): + r"""A request message for InterconnectLocations.Get. See the + method description for details. + + Attributes: + interconnect_location (str): + Name of the interconnect location to return. + project (str): + Project ID for this request. + """ + + interconnect_location: str = proto.Field( + proto.STRING, + number=492235846, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetInterconnectRemoteLocationRequest(proto.Message): + r"""A request message for InterconnectRemoteLocations.Get. See + the method description for details. + + Attributes: + interconnect_remote_location (str): + Name of the interconnect remote location to + return. + project (str): + Project ID for this request. + """ + + interconnect_remote_location: str = proto.Field( + proto.STRING, + number=290153949, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetInterconnectRequest(proto.Message): + r"""A request message for Interconnects.Get. See the method + description for details. 
+ + Attributes: + interconnect (str): + Name of the interconnect to return. + project (str): + Project ID for this request. + """ + + interconnect: str = proto.Field( + proto.STRING, + number=224601230, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetLicenseCodeRequest(proto.Message): + r"""A request message for LicenseCodes.Get. See the method + description for details. + + Attributes: + license_code (str): + Number corresponding to the License code + resource to return. + project (str): + Project ID for this request. + """ + + license_code: str = proto.Field( + proto.STRING, + number=1467179, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetLicenseRequest(proto.Message): + r"""A request message for Licenses.Get. See the method + description for details. + + Attributes: + license_ (str): + Name of the License resource to return. + project (str): + Project ID for this request. + """ + + license_: str = proto.Field( + proto.STRING, + number=166757441, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetMachineImageRequest(proto.Message): + r"""A request message for MachineImages.Get. See the method + description for details. + + Attributes: + machine_image (str): + The name of the machine image. + project (str): + Project ID for this request. + """ + + machine_image: str = proto.Field( + proto.STRING, + number=69189475, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetMachineTypeRequest(proto.Message): + r"""A request message for MachineTypes.Get. See the method + description for details. + + Attributes: + machine_type (str): + Name of the machine type to return. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. 
+ """ + + machine_type: str = proto.Field( + proto.STRING, + number=227711026, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetNatMappingInfoRoutersRequest(proto.Message): + r"""A request message for Routers.GetNatMappingInfo. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. 
For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + nat_name (str): + Name of the nat service to filter the Nat + Mapping information. If it is omitted, all nats + for this router will be returned. Name should + conform to RFC1035. + + This field is a member of `oneof`_ ``_nat_name``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. 
This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + router (str): + Name of the Router resource to query for Nat + Mapping information of VM endpoints. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + nat_name: str = proto.Field( + proto.STRING, + number=425596649, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) + + +class GetNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.Get. See the method + description for details. 
+ + Attributes: + network_attachment (str): + Name of the NetworkAttachment resource to + return. + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + """ + + network_attachment: str = proto.Field( + proto.STRING, + number=224644052, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetNetworkEdgeSecurityServiceRequest(proto.Message): + r"""A request message for NetworkEdgeSecurityServices.Get. See + the method description for details. + + Attributes: + network_edge_security_service (str): + Name of the network edge security service to + get. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + network_edge_security_service: str = proto.Field( + proto.STRING, + number=157011879, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetNetworkEndpointGroupRequest(proto.Message): + r"""A request message for NetworkEndpointGroups.Get. See the + method description for details. + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group. It + should comply with RFC1035. + project (str): + Project ID for this request. + zone (str): + The name of the zone where the network + endpoint group is located. It should comply with + RFC1035. + """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.Get. See the + method description for details. + + Attributes: + firewall_policy (str): + Name of the firewall policy to get. 
+ project (str): + Project ID for this request. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetNetworkRequest(proto.Message): + r"""A request message for Networks.Get. See the method + description for details. + + Attributes: + network (str): + Name of the network to return. + project (str): + Project ID for this request. + """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.Get. See the method + description for details. + + Attributes: + node_group (str): + Name of the node group to return. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. + """ + + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetNodeTemplateRequest(proto.Message): + r"""A request message for NodeTemplates.Get. See the method + description for details. + + Attributes: + node_template (str): + Name of the node template to return. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + """ + + node_template: str = proto.Field( + proto.STRING, + number=323154455, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetNodeTypeRequest(proto.Message): + r"""A request message for NodeTypes.Get. See the method + description for details. + + Attributes: + node_type (str): + Name of the node type to return. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. 
+ """ + + node_type: str = proto.Field( + proto.STRING, + number=465832791, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetPacketMirroringRequest(proto.Message): + r"""A request message for PacketMirrorings.Get. See the method + description for details. + + Attributes: + packet_mirroring (str): + Name of the PacketMirroring resource to + return. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + packet_mirroring: str = proto.Field( + proto.STRING, + number=22305996, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetProjectRequest(proto.Message): + r"""A request message for Projects.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetPublicAdvertisedPrefixeRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + public_advertised_prefix (str): + Name of the PublicAdvertisedPrefix resource + to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_advertised_prefix: str = proto.Field( + proto.STRING, + number=101874590, + ) + + +class GetPublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix resource to + return. + region (str): + Name of the region of this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix: str = proto.Field( + proto.STRING, + number=204238440, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionAutoscalerRequest(proto.Message): + r"""A request message for RegionAutoscalers.Get. See the method + description for details. + + Attributes: + autoscaler (str): + Name of the autoscaler to return. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.Get. See the + method description for details. + + Attributes: + backend_service (str): + Name of the BackendService resource to + return. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionCommitmentRequest(proto.Message): + r"""A request message for RegionCommitments.Get. See the method + description for details. + + Attributes: + commitment (str): + Name of the commitment to return. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + commitment: str = proto.Field( + proto.STRING, + number=482134805, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.Get. 
See the method + description for details. + + Attributes: + disk (str): + Name of the regional persistent disk to + return. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionDiskTypeRequest(proto.Message): + r"""A request message for RegionDiskTypes.Get. See the method + description for details. + + Attributes: + disk_type (str): + Name of the disk type to return. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + """ + + disk_type: str = proto.Field( + proto.STRING, + number=93009052, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionHealthCheckRequest(proto.Message): + r"""A request message for RegionHealthChecks.Get. See the method + description for details. + + Attributes: + health_check (str): + Name of the HealthCheck resource to return. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionHealthCheckServiceRequest(proto.Message): + r"""A request message for RegionHealthCheckServices.Get. See the + method description for details. + + Attributes: + health_check_service (str): + Name of the HealthCheckService to update. The + name must be 1-63 characters long, and comply + with RFC1035. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ """ + + health_check_service: str = proto.Field( + proto.STRING, + number=408374747, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.Get. See + the method description for details. + + Attributes: + instance_group_manager (str): + Name of the managed instance group to return. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionInstanceGroupRequest(proto.Message): + r"""A request message for RegionInstanceGroups.Get. See the + method description for details. + + Attributes: + instance_group (str): + Name of the instance group resource to + return. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionInstanceTemplateRequest(proto.Message): + r"""A request message for RegionInstanceTemplates.Get. See the + method description for details. + + Attributes: + instance_template (str): + The name of the instance template. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. 
+ """ + + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionNetworkEndpointGroupRequest(proto.Message): + r"""A request message for RegionNetworkEndpointGroups.Get. See + the method description for details. + + Attributes: + network_endpoint_group (str): + The name of the network endpoint group. It + should comply with RFC1035. + project (str): + Project ID for this request. + region (str): + The name of the region where the network + endpoint group is located. It should comply with + RFC1035. + """ + + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.Get. See + the method description for details. + + Attributes: + firewall_policy (str): + Name of the firewall policy to get. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionNotificationEndpointRequest(proto.Message): + r"""A request message for RegionNotificationEndpoints.Get. See + the method description for details. + + Attributes: + notification_endpoint (str): + Name of the NotificationEndpoint resource to + return. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ """ + + notification_endpoint: str = proto.Field( + proto.STRING, + number=376807017, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionOperationRequest(proto.Message): + r"""A request message for RegionOperations.Get. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to return. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionRequest(proto.Message): + r"""A request message for Regions.Get. See the method description + for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + security_policy (str): + Name of the security policy to get. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + + +class GetRegionSslCertificateRequest(proto.Message): + r"""A request message for RegionSslCertificates.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. 
+ region (str): + Name of the region scoping this request. + ssl_certificate (str): + Name of the SslCertificate resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + ssl_certificate: str = proto.Field( + proto.STRING, + number=46443492, + ) + + +class GetRegionSslPolicyRequest(proto.Message): + r"""A request message for RegionSslPolicies.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + ssl_policy (str): + Name of the SSL policy to update. The name + must be 1-63 characters long, and comply with + RFC1035. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + ) + + +class GetRegionTargetHttpProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpProxies.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + target_http_proxy (str): + Name of the TargetHttpProxy resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + target_http_proxy: str = proto.Field( + proto.STRING, + number=206872421, + ) + + +class GetRegionTargetHttpsProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpsProxies.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to + return. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class GetRegionTargetTcpProxyRequest(proto.Message): + r"""A request message for RegionTargetTcpProxies.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + target_tcp_proxy: str = proto.Field( + proto.STRING, + number=503065442, + ) + + +class GetRegionUrlMapRequest(proto.Message): + r"""A request message for RegionUrlMaps.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + url_map (str): + Name of the UrlMap resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + + +class GetReservationRequest(proto.Message): + r"""A request message for Reservations.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + reservation (str): + Name of the reservation to retrieve. + zone (str): + Name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + reservation: str = proto.Field( + proto.STRING, + number=47530956, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.Get. 
See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + resource_policy (str): + Name of the resource policy to retrieve. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource_policy: str = proto.Field( + proto.STRING, + number=159240835, + ) + + +class GetRouteRequest(proto.Message): + r"""A request message for Routes.Get. See the method description + for details. + + Attributes: + project (str): + Project ID for this request. + route (str): + Name of the Route resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + route: str = proto.Field( + proto.STRING, + number=108704329, + ) + + +class GetRouterRequest(proto.Message): + r"""A request message for Routers.Get. See the method description + for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + router (str): + Name of the Router resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) + + +class GetRouterStatusRouterRequest(proto.Message): + r"""A request message for Routers.GetRouterStatus. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + router (str): + Name of the Router resource to query. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) + + +class GetRuleFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.GetRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to which the + queried rule belongs. + priority (int): + The priority of the rule to get from the + firewall policy. + + This field is a member of `oneof`_ ``_priority``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + + +class GetRuleNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.GetRule. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to which the + queried rule belongs. + priority (int): + The priority of the rule to get from the + firewall policy. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetRuleRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.GetRule. + See the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to which the + queried rule belongs. + priority (int): + The priority of the rule to get from the + firewall policy. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetRuleSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.GetRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + priority (int): + The priority of the rule to get from the + security policy. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + security_policy (str): + Name of the security policy to which the + queried rule belongs. + """ + + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + + +class GetScreenshotInstanceRequest(proto.Message): + r"""A request message for Instances.GetScreenshot. See the method + description for details. + + Attributes: + instance (str): + Name of the instance scoping this request. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + security_policy (str): + Name of the security policy to get. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + + +class GetSerialPortOutputInstanceRequest(proto.Message): + r"""A request message for Instances.GetSerialPortOutput. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance for this request. + port (int): + Specifies which COM or serial port to + retrieve data from. + + This field is a member of `oneof`_ ``_port``. + project (str): + Project ID for this request. + start (int): + Specifies the starting byte position of the output to + return. To start with the first byte of output to the + specified port, omit this field or set it to ``0``. If the + output for that byte position is available, this field + matches the ``start`` parameter sent with the request. If + the amount of serial console output exceeds the size of the + buffer (1 MB), the oldest output is discarded and is no + longer available. If the requested start position refers to + discarded output, the start position is adjusted to the + oldest output still available, and the adjusted start + position is returned as the ``start`` property value. You + can also provide a negative start position, which translates + to the most recent number of bytes written to the serial + port. 
For example, -3 is interpreted as the most recent 3 + bytes written to the serial console. + + This field is a member of `oneof`_ ``_start``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + start: int = proto.Field( + proto.INT64, + number=109757538, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + service_attachment (str): + Name of the ServiceAttachment resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + service_attachment: str = proto.Field( + proto.STRING, + number=338957549, + ) + + +class GetShieldedInstanceIdentityInstanceRequest(proto.Message): + r"""A request message for Instances.GetShieldedInstanceIdentity. + See the method description for details. + + Attributes: + instance (str): + Name or id of the instance scoping this + request. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetSnapshotRequest(proto.Message): + r"""A request message for Snapshots.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + snapshot (str): + Name of the Snapshot resource to return. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + snapshot: str = proto.Field( + proto.STRING, + number=284874180, + ) + + +class GetSslCertificateRequest(proto.Message): + r"""A request message for SslCertificates.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + ssl_certificate (str): + Name of the SslCertificate resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + ssl_certificate: str = proto.Field( + proto.STRING, + number=46443492, + ) + + +class GetSslPolicyRequest(proto.Message): + r"""A request message for SslPolicies.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + ssl_policy (str): + Name of the SSL policy to update. The name + must be 1-63 characters long, and comply with + RFC1035. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + ) + + +class GetStatusVpnGatewayRequest(proto.Message): + r"""A request message for VpnGateways.GetStatus. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + vpn_gateway (str): + Name of the VPN gateway to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + vpn_gateway: str = proto.Field( + proto.STRING, + number=406684153, + ) + + +class GetSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + subnetwork (str): + Name of the Subnetwork resource to return. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + ) + + +class GetTargetGrpcProxyRequest(proto.Message): + r"""A request message for TargetGrpcProxies.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + target_grpc_proxy (str): + Name of the TargetGrpcProxy resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + target_grpc_proxy: str = proto.Field( + proto.STRING, + number=5020283, + ) + + +class GetTargetHttpProxyRequest(proto.Message): + r"""A request message for TargetHttpProxies.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + target_http_proxy (str): + Name of the TargetHttpProxy resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + target_http_proxy: str = proto.Field( + proto.STRING, + number=206872421, + ) + + +class GetTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class GetTargetInstanceRequest(proto.Message): + r"""A request message for TargetInstances.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + target_instance (str): + Name of the TargetInstance resource to + return. + zone (str): + Name of the zone scoping this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + target_instance: str = proto.Field( + proto.STRING, + number=289769347, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + target_pool (str): + Name of the TargetPool resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + + +class GetTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + target_ssl_proxy (str): + Name of the TargetSslProxy resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + target_ssl_proxy: str = proto.Field( + proto.STRING, + number=338795853, + ) + + +class GetTargetTcpProxyRequest(proto.Message): + r"""A request message for TargetTcpProxies.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource to + return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + target_tcp_proxy: str = proto.Field( + proto.STRING, + number=503065442, + ) + + +class GetTargetVpnGatewayRequest(proto.Message): + r"""A request message for TargetVpnGateways.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + target_vpn_gateway (str): + Name of the target VPN gateway to return. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + target_vpn_gateway: str = proto.Field( + proto.STRING, + number=532512843, + ) + + +class GetUrlMapRequest(proto.Message): + r"""A request message for UrlMaps.Get. See the method description + for details. + + Attributes: + project (str): + Project ID for this request. + url_map (str): + Name of the UrlMap resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + + +class GetVpnGatewayRequest(proto.Message): + r"""A request message for VpnGateways.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + vpn_gateway (str): + Name of the VPN gateway to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + vpn_gateway: str = proto.Field( + proto.STRING, + number=406684153, + ) + + +class GetVpnTunnelRequest(proto.Message): + r"""A request message for VpnTunnels.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + vpn_tunnel (str): + Name of the VpnTunnel resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + vpn_tunnel: str = proto.Field( + proto.STRING, + number=143821331, + ) + + +class GetXpnHostProjectRequest(proto.Message): + r"""A request message for Projects.GetXpnHost. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class GetXpnResourcesProjectsRequest(proto.Message): + r"""A request message for Projects.GetXpnResources. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class GetZoneOperationRequest(proto.Message): + r"""A request message for ZoneOperations.Get. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to return. + project (str): + Project ID for this request. + zone (str): + Name of the zone for this request. + """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetZoneRequest(proto.Message): + r"""A request message for Zones.Get. See the method description + for details. + + Attributes: + project (str): + Project ID for this request. + zone (str): + Name of the zone resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GlobalAddressesMoveRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + An optional destination address description + if intended to be different from the source. + + This field is a member of `oneof`_ ``_description``. + destination_address (str): + The URL of the destination address to move + to. This can be a full or partial URL. For + example, the following are all valid URLs to a + address: - + https://www.googleapis.com/compute/v1/projects/project + /global/addresses/address - + projects/project/global/addresses/address Note + that destination project must be different from + the source project. So /global/addresses/address + is not valid partial url. + + This field is a member of `oneof`_ ``_destination_address``. + """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + destination_address: str = proto.Field( + proto.STRING, + number=371693763, + optional=True, + ) + + +class GlobalNetworkEndpointGroupsAttachEndpointsRequest(proto.Message): + r""" + + Attributes: + network_endpoints (MutableSequence[google.cloud.compute_v1.types.NetworkEndpoint]): + The list of network endpoints to be attached. + """ + + network_endpoints: MutableSequence['NetworkEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=149850285, + message='NetworkEndpoint', + ) + + +class GlobalNetworkEndpointGroupsDetachEndpointsRequest(proto.Message): + r""" + + Attributes: + network_endpoints (MutableSequence[google.cloud.compute_v1.types.NetworkEndpoint]): + The list of network endpoints to be detached. + """ + + network_endpoints: MutableSequence['NetworkEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=149850285, + message='NetworkEndpoint', + ) + + +class GlobalOrganizationSetPolicyRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bindings (MutableSequence[google.cloud.compute_v1.types.Binding]): + Flatten Policy to create a backward + compatible wire-format. Deprecated. Use 'policy' + to specify bindings. + etag (str): + Flatten Policy to create a backward + compatible wire-format. Deprecated. Use 'policy' + to specify the etag. + + This field is a member of `oneof`_ ``_etag``. + policy (google.cloud.compute_v1.types.Policy): + REQUIRED: The complete policy to be applied + to the 'resource'. The size of the policy is + limited to a few 10s of KB. An empty policy is + in general a valid policy but certain services + (like Projects) might reject them. + + This field is a member of `oneof`_ ``_policy``. + """ + + bindings: MutableSequence['Binding'] = proto.RepeatedField( + proto.MESSAGE, + number=403251854, + message='Binding', + ) + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + policy: 'Policy' = proto.Field( + proto.MESSAGE, + number=91071794, + optional=True, + message='Policy', + ) + + +class GlobalSetLabelsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + label_fingerprint (str): + The fingerprint of the previous set of labels + for this resource, used to detect conflicts. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash when updating or + changing labels, otherwise the request will fail + with error 412 conditionNotMet. Make a get() + request to the resource to get the latest + fingerprint. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + A list of labels to apply for this resource. + Each label must comply with the requirements for + labels. 
For example, "webserver-frontend": + "images". A label value can also be empty (e.g. + "my-label": ""). + """ + + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + + +class GlobalSetPolicyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bindings (MutableSequence[google.cloud.compute_v1.types.Binding]): + Flatten Policy to create a backward + compatible wire-format. Deprecated. Use 'policy' + to specify bindings. + etag (str): + Flatten Policy to create a backward + compatible wire-format. Deprecated. Use 'policy' + to specify the etag. + + This field is a member of `oneof`_ ``_etag``. + policy (google.cloud.compute_v1.types.Policy): + REQUIRED: The complete policy to be applied + to the 'resource'. The size of the policy is + limited to a few 10s of KB. An empty policy is + in general a valid policy but certain services + (like Projects) might reject them. + + This field is a member of `oneof`_ ``_policy``. + """ + + bindings: MutableSequence['Binding'] = proto.RepeatedField( + proto.MESSAGE, + number=403251854, + message='Binding', + ) + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + policy: 'Policy' = proto.Field( + proto.MESSAGE, + number=91071794, + optional=True, + message='Policy', + ) + + +class GuestAttributes(proto.Message): + r"""A guest attributes entry. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + [Output Only] Type of the resource. Always + compute#guestAttributes for guest attributes entry. + + This field is a member of `oneof`_ ``_kind``. + query_path (str): + The path to be queried. 
This can be the + default namespace ('') or a nested namespace + ('\/') or a specified key ('\/\'). + + This field is a member of `oneof`_ ``_query_path``. + query_value (google.cloud.compute_v1.types.GuestAttributesValue): + [Output Only] The value of the requested queried path. + + This field is a member of `oneof`_ ``_query_value``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + variable_key (str): + The key to search for. + + This field is a member of `oneof`_ ``_variable_key``. + variable_value (str): + [Output Only] The value found for the requested key. + + This field is a member of `oneof`_ ``_variable_value``. + """ + + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + query_path: str = proto.Field( + proto.STRING, + number=368591164, + optional=True, + ) + query_value: 'GuestAttributesValue' = proto.Field( + proto.MESSAGE, + number=157570874, + optional=True, + message='GuestAttributesValue', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + variable_key: str = proto.Field( + proto.STRING, + number=164364828, + optional=True, + ) + variable_value: str = proto.Field( + proto.STRING, + number=124582382, + optional=True, + ) + + +class GuestAttributesEntry(proto.Message): + r"""A guest attributes namespace/key/value entry. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Key for the guest attribute entry. + + This field is a member of `oneof`_ ``_key``. + namespace (str): + Namespace for the guest attribute entry. + + This field is a member of `oneof`_ ``_namespace``. + value (str): + Value for the guest attribute entry. + + This field is a member of `oneof`_ ``_value``. 
+ """ + + key: str = proto.Field( + proto.STRING, + number=106079, + optional=True, + ) + namespace: str = proto.Field( + proto.STRING, + number=178476379, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=111972721, + optional=True, + ) + + +class GuestAttributesValue(proto.Message): + r"""Array of guest attribute namespace/key/value tuples. + + Attributes: + items (MutableSequence[google.cloud.compute_v1.types.GuestAttributesEntry]): + + """ + + items: MutableSequence['GuestAttributesEntry'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='GuestAttributesEntry', + ) + + +class GuestOsFeature(proto.Message): + r"""Guest OS features. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (str): + The ID of a supported feature. To add multiple values, use + commas to separate values. Set to one or more of the + following values: - VIRTIO_SCSI_MULTIQUEUE - WINDOWS - + MULTI_IP_SUBNET - UEFI_COMPATIBLE - GVNIC - SEV_CAPABLE - + SUSPEND_RESUME_COMPATIBLE - SEV_LIVE_MIGRATABLE - + SEV_SNP_CAPABLE For more information, see Enabling guest + operating system features. Check the Type enum for the list + of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""The ID of a supported feature. To add multiple values, use commas to + separate values. Set to one or more of the following values: - + VIRTIO_SCSI_MULTIQUEUE - WINDOWS - MULTI_IP_SUBNET - UEFI_COMPATIBLE + - GVNIC - SEV_CAPABLE - SUSPEND_RESUME_COMPATIBLE - + SEV_LIVE_MIGRATABLE - SEV_SNP_CAPABLE For more information, see + Enabling guest operating system features. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + FEATURE_TYPE_UNSPECIFIED (531767259): + No description available. + GVNIC (68209305): + No description available. + MULTI_IP_SUBNET (151776719): + No description available. 
+ SECURE_BOOT (376811194): + No description available. + SEV_CAPABLE (87083793): + No description available. + SEV_LIVE_MIGRATABLE (392039820): + No description available. + SEV_SNP_CAPABLE (426919): + No description available. + UEFI_COMPATIBLE (195865408): + No description available. + VIRTIO_SCSI_MULTIQUEUE (201597069): + No description available. + WINDOWS (456863331): + No description available. + """ + UNDEFINED_TYPE = 0 + FEATURE_TYPE_UNSPECIFIED = 531767259 + GVNIC = 68209305 + MULTI_IP_SUBNET = 151776719 + SECURE_BOOT = 376811194 + SEV_CAPABLE = 87083793 + SEV_LIVE_MIGRATABLE = 392039820 + SEV_SNP_CAPABLE = 426919 + UEFI_COMPATIBLE = 195865408 + VIRTIO_SCSI_MULTIQUEUE = 201597069 + WINDOWS = 456863331 + + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class HTTP2HealthCheck(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + The value of the host header in the HTTP/2 + health check request. If left empty (default + value), the host header is set to the + destination IP address to which health check + packets are sent. The destination IP address + depends on the type of load balancer. For + details, see: + https://cloud.google.com/load-balancing/docs/health-check-concepts#hc-packet-dest + + This field is a member of `oneof`_ ``_host``. + port (int): + The TCP port number to which the health check + prober sends packets. The default value is 443. + Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. + port_name (str): + Not supported. + + This field is a member of `oneof`_ ``_port_name``. + port_specification (str): + Specifies how a port is selected for health checking. Can be + one of the following values: USE_FIXED_PORT: Specifies a + port number explicitly using the port field in the health + check. 
Supported by backend services for pass-through load + balancers and backend services for proxy load balancers. Not + supported by target pools. The health check supports all + backends supported by the backend service provided the + backend can be health checked. For example, GCE_VM_IP + network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not + supported. USE_SERVING_PORT: Provides an indirect method of + specifying the health check port by referring to the backend + service. Only supported by backend services for proxy load + balancers. Not supported by target pools. Not supported by + backend services for pass-through load balancers. Supports + all backends that can be health checked; for example, + GCE_VM_IP_PORT network endpoint groups and instance group + backends. For GCE_VM_IP_PORT network endpoint group + backends, the health check uses the port number specified + for each endpoint in the network endpoint group. For + instance group backends, the health check uses the port + number determined by looking up the backend service's named + port in the instance group's list of named ports. Check the + PortSpecification enum for the list of possible values. + + This field is a member of `oneof`_ ``_port_specification``. + proxy_header (str): + Specifies the type of proxy header to append before sending + data to the backend, either NONE or PROXY_V1. The default is + NONE. Check the ProxyHeader enum for the list of possible + values. + + This field is a member of `oneof`_ ``_proxy_header``. + request_path (str): + The request path of the HTTP/2 health check + request. The default value is /. + + This field is a member of `oneof`_ ``_request_path``. + response (str): + Creates a content-based HTTP/2 health check. 
+ In addition to the required HTTP 200 (OK) status + code, you can configure the health check to pass + only when the backend sends this specific ASCII + response string within the first 1024 bytes of + the HTTP response body. For details, see: + https://cloud.google.com/load-balancing/docs/health-check-concepts#criteria-protocol-http + + This field is a member of `oneof`_ ``_response``. + """ + class PortSpecification(proto.Enum): + r"""Specifies how a port is selected for health checking. Can be one of + the following values: USE_FIXED_PORT: Specifies a port number + explicitly using the port field in the health check. Supported by + backend services for pass-through load balancers and backend + services for proxy load balancers. Not supported by target pools. + The health check supports all backends supported by the backend + service provided the backend can be health checked. For example, + GCE_VM_IP network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not supported. + USE_SERVING_PORT: Provides an indirect method of specifying the + health check port by referring to the backend service. Only + supported by backend services for proxy load balancers. Not + supported by target pools. Not supported by backend services for + pass-through load balancers. Supports all backends that can be + health checked; for example, GCE_VM_IP_PORT network endpoint groups + and instance group backends. For GCE_VM_IP_PORT network endpoint + group backends, the health check uses the port number specified for + each endpoint in the network endpoint group. For instance group + backends, the health check uses the port number determined by + looking up the backend service's named port in the instance group's + list of named ports. + + Values: + UNDEFINED_PORT_SPECIFICATION (0): + A value indicating that the enum field is not + set. 
+ USE_FIXED_PORT (190235748): + The port number in the health check's port is + used for health checking. Applies to network + endpoint group and instance group backends. + USE_NAMED_PORT (349300671): + Not supported. + USE_SERVING_PORT (362637516): + For network endpoint group backends, the + health check uses the port number specified on + each endpoint in the network endpoint group. For + instance group backends, the health check uses + the port number specified for the backend + service's named port defined in the instance + group's named ports. + """ + UNDEFINED_PORT_SPECIFICATION = 0 + USE_FIXED_PORT = 190235748 + USE_NAMED_PORT = 349300671 + USE_SERVING_PORT = 362637516 + + class ProxyHeader(proto.Enum): + r"""Specifies the type of proxy header to append before sending data to + the backend, either NONE or PROXY_V1. The default is NONE. + + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. + """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + host: str = proto.Field( + proto.STRING, + number=3208616, + optional=True, + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + port_name: str = proto.Field( + proto.STRING, + number=41534345, + optional=True, + ) + port_specification: str = proto.Field( + proto.STRING, + number=51590597, + optional=True, + ) + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + request_path: str = proto.Field( + proto.STRING, + number=229403605, + optional=True, + ) + response: str = proto.Field( + proto.STRING, + number=196547649, + optional=True, + ) + + +class HTTPHealthCheck(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + The value of the host header in the HTTP + health check request. If left empty (default + value), the host header is set to the + destination IP address to which health check + packets are sent. The destination IP address + depends on the type of load balancer. For + details, see: + https://cloud.google.com/load-balancing/docs/health-check-concepts#hc-packet-dest + + This field is a member of `oneof`_ ``_host``. + port (int): + The TCP port number to which the health check + prober sends packets. The default value is 80. + Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. + port_name (str): + Not supported. + + This field is a member of `oneof`_ ``_port_name``. + port_specification (str): + Specifies how a port is selected for health checking. Can be + one of the following values: USE_FIXED_PORT: Specifies a + port number explicitly using the port field in the health + check. Supported by backend services for pass-through load + balancers and backend services for proxy load balancers. + Also supported in legacy HTTP health checks for target + pools. The health check supports all backends supported by + the backend service provided the backend can be health + checked. For example, GCE_VM_IP network endpoint groups, + GCE_VM_IP_PORT network endpoint groups, and instance group + backends. USE_NAMED_PORT: Not supported. USE_SERVING_PORT: + Provides an indirect method of specifying the health check + port by referring to the backend service. Only supported by + backend services for proxy load balancers. Not supported by + target pools. Not supported by backend services for + pass-through load balancers. Supports all backends that can + be health checked; for example, GCE_VM_IP_PORT network + endpoint groups and instance group backends. 
For + GCE_VM_IP_PORT network endpoint group backends, the health + check uses the port number specified for each endpoint in + the network endpoint group. For instance group backends, the + health check uses the port number determined by looking up + the backend service's named port in the instance group's + list of named ports. Check the PortSpecification enum for + the list of possible values. + + This field is a member of `oneof`_ ``_port_specification``. + proxy_header (str): + Specifies the type of proxy header to append before sending + data to the backend, either NONE or PROXY_V1. The default is + NONE. Check the ProxyHeader enum for the list of possible + values. + + This field is a member of `oneof`_ ``_proxy_header``. + request_path (str): + The request path of the HTTP health check + request. The default value is /. + + This field is a member of `oneof`_ ``_request_path``. + response (str): + Creates a content-based HTTP health check. In + addition to the required HTTP 200 (OK) status + code, you can configure the health check to pass + only when the backend sends this specific ASCII + response string within the first 1024 bytes of + the HTTP response body. For details, see: + https://cloud.google.com/load-balancing/docs/health-check-concepts#criteria-protocol-http + + This field is a member of `oneof`_ ``_response``. + """ + class PortSpecification(proto.Enum): + r"""Specifies how a port is selected for health checking. Can be one of + the following values: USE_FIXED_PORT: Specifies a port number + explicitly using the port field in the health check. Supported by + backend services for pass-through load balancers and backend + services for proxy load balancers. Also supported in legacy HTTP + health checks for target pools. The health check supports all + backends supported by the backend service provided the backend can + be health checked. 
For example, GCE_VM_IP network endpoint groups, + GCE_VM_IP_PORT network endpoint groups, and instance group backends. + USE_NAMED_PORT: Not supported. USE_SERVING_PORT: Provides an + indirect method of specifying the health check port by referring to + the backend service. Only supported by backend services for proxy + load balancers. Not supported by target pools. Not supported by + backend services for pass-through load balancers. Supports all + backends that can be health checked; for example, GCE_VM_IP_PORT + network endpoint groups and instance group backends. For + GCE_VM_IP_PORT network endpoint group backends, the health check + uses the port number specified for each endpoint in the network + endpoint group. For instance group backends, the health check uses + the port number determined by looking up the backend service's named + port in the instance group's list of named ports. + + Values: + UNDEFINED_PORT_SPECIFICATION (0): + A value indicating that the enum field is not + set. + USE_FIXED_PORT (190235748): + The port number in the health check's port is + used for health checking. Applies to network + endpoint group and instance group backends. + USE_NAMED_PORT (349300671): + Not supported. + USE_SERVING_PORT (362637516): + For network endpoint group backends, the + health check uses the port number specified on + each endpoint in the network endpoint group. For + instance group backends, the health check uses + the port number specified for the backend + service's named port defined in the instance + group's named ports. + """ + UNDEFINED_PORT_SPECIFICATION = 0 + USE_FIXED_PORT = 190235748 + USE_NAMED_PORT = 349300671 + USE_SERVING_PORT = 362637516 + + class ProxyHeader(proto.Enum): + r"""Specifies the type of proxy header to append before sending data to + the backend, either NONE or PROXY_V1. The default is NONE. + + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. 
+ NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. + """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + host: str = proto.Field( + proto.STRING, + number=3208616, + optional=True, + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + port_name: str = proto.Field( + proto.STRING, + number=41534345, + optional=True, + ) + port_specification: str = proto.Field( + proto.STRING, + number=51590597, + optional=True, + ) + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + request_path: str = proto.Field( + proto.STRING, + number=229403605, + optional=True, + ) + response: str = proto.Field( + proto.STRING, + number=196547649, + optional=True, + ) + + +class HTTPSHealthCheck(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + The value of the host header in the HTTPS + health check request. If left empty (default + value), the host header is set to the + destination IP address to which health check + packets are sent. The destination IP address + depends on the type of load balancer. For + details, see: + https://cloud.google.com/load-balancing/docs/health-check-concepts#hc-packet-dest + + This field is a member of `oneof`_ ``_host``. + port (int): + The TCP port number to which the health check + prober sends packets. The default value is 443. + Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. + port_name (str): + Not supported. + + This field is a member of `oneof`_ ``_port_name``. + port_specification (str): + Specifies how a port is selected for health checking. Can be + one of the following values: USE_FIXED_PORT: Specifies a + port number explicitly using the port field in the health + check. 
Supported by backend services for pass-through load + balancers and backend services for proxy load balancers. Not + supported by target pools. The health check supports all + backends supported by the backend service provided the + backend can be health checked. For example, GCE_VM_IP + network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not + supported. USE_SERVING_PORT: Provides an indirect method of + specifying the health check port by referring to the backend + service. Only supported by backend services for proxy load + balancers. Not supported by target pools. Not supported by + backend services for pass-through load balancers. Supports + all backends that can be health checked; for example, + GCE_VM_IP_PORT network endpoint groups and instance group + backends. For GCE_VM_IP_PORT network endpoint group + backends, the health check uses the port number specified + for each endpoint in the network endpoint group. For + instance group backends, the health check uses the port + number determined by looking up the backend service's named + port in the instance group's list of named ports. Check the + PortSpecification enum for the list of possible values. + + This field is a member of `oneof`_ ``_port_specification``. + proxy_header (str): + Specifies the type of proxy header to append before sending + data to the backend, either NONE or PROXY_V1. The default is + NONE. Check the ProxyHeader enum for the list of possible + values. + + This field is a member of `oneof`_ ``_proxy_header``. + request_path (str): + The request path of the HTTPS health check + request. The default value is /. + + This field is a member of `oneof`_ ``_request_path``. + response (str): + Creates a content-based HTTPS health check. 
+ In addition to the required HTTP 200 (OK) status + code, you can configure the health check to pass + only when the backend sends this specific ASCII + response string within the first 1024 bytes of + the HTTP response body. For details, see: + https://cloud.google.com/load-balancing/docs/health-check-concepts#criteria-protocol-http + + This field is a member of `oneof`_ ``_response``. + """ + class PortSpecification(proto.Enum): + r"""Specifies how a port is selected for health checking. Can be one of + the following values: USE_FIXED_PORT: Specifies a port number + explicitly using the port field in the health check. Supported by + backend services for pass-through load balancers and backend + services for proxy load balancers. Not supported by target pools. + The health check supports all backends supported by the backend + service provided the backend can be health checked. For example, + GCE_VM_IP network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not supported. + USE_SERVING_PORT: Provides an indirect method of specifying the + health check port by referring to the backend service. Only + supported by backend services for proxy load balancers. Not + supported by target pools. Not supported by backend services for + pass-through load balancers. Supports all backends that can be + health checked; for example, GCE_VM_IP_PORT network endpoint groups + and instance group backends. For GCE_VM_IP_PORT network endpoint + group backends, the health check uses the port number specified for + each endpoint in the network endpoint group. For instance group + backends, the health check uses the port number determined by + looking up the backend service's named port in the instance group's + list of named ports. + + Values: + UNDEFINED_PORT_SPECIFICATION (0): + A value indicating that the enum field is not + set. 
+ USE_FIXED_PORT (190235748): + The port number in the health check's port is + used for health checking. Applies to network + endpoint group and instance group backends. + USE_NAMED_PORT (349300671): + Not supported. + USE_SERVING_PORT (362637516): + For network endpoint group backends, the + health check uses the port number specified on + each endpoint in the network endpoint group. For + instance group backends, the health check uses + the port number specified for the backend + service's named port defined in the instance + group's named ports. + """ + UNDEFINED_PORT_SPECIFICATION = 0 + USE_FIXED_PORT = 190235748 + USE_NAMED_PORT = 349300671 + USE_SERVING_PORT = 362637516 + + class ProxyHeader(proto.Enum): + r"""Specifies the type of proxy header to append before sending data to + the backend, either NONE or PROXY_V1. The default is NONE. + + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. + """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + host: str = proto.Field( + proto.STRING, + number=3208616, + optional=True, + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + port_name: str = proto.Field( + proto.STRING, + number=41534345, + optional=True, + ) + port_specification: str = proto.Field( + proto.STRING, + number=51590597, + optional=True, + ) + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + request_path: str = proto.Field( + proto.STRING, + number=229403605, + optional=True, + ) + response: str = proto.Field( + proto.STRING, + number=196547649, + optional=True, + ) + + +class HealthCheck(proto.Message): + r"""Represents a Health Check resource. 
Google Compute Engine has two + Health Check resources: \* + `Global `__ \* + `Regional `__ + Internal HTTP(S) load balancers must use regional health checks + (``compute.v1.regionHealthChecks``). Traffic Director must use + global health checks (``compute.v1.healthChecks``). Internal TCP/UDP + load balancers can use either regional or global health checks + (``compute.v1.regionHealthChecks`` or ``compute.v1.healthChecks``). + External HTTP(S), TCP proxy, and SSL proxy load balancers as well as + managed instance group auto-healing must use global health checks + (``compute.v1.healthChecks``). Backend service-based network load + balancers must use regional health checks + (``compute.v1.regionHealthChecks``). Target pool-based network load + balancers must use legacy HTTP health checks + (``compute.v1.httpHealthChecks``). For more information, see Health + checks overview. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + check_interval_sec (int): + How often (in seconds) to send a health + check. The default value is 5 seconds. + + This field is a member of `oneof`_ ``_check_interval_sec``. + creation_timestamp (str): + [Output Only] Creation timestamp in 3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + grpc_health_check (google.cloud.compute_v1.types.GRPCHealthCheck): + + This field is a member of `oneof`_ ``_grpc_health_check``. + healthy_threshold (int): + A so-far unhealthy instance will be marked + healthy after this many consecutive successes. + The default value is 2. + + This field is a member of `oneof`_ ``_healthy_threshold``. + http2_health_check (google.cloud.compute_v1.types.HTTP2HealthCheck): + + This field is a member of `oneof`_ ``_http2_health_check``. 
+ http_health_check (google.cloud.compute_v1.types.HTTPHealthCheck): + + This field is a member of `oneof`_ ``_http_health_check``. + https_health_check (google.cloud.compute_v1.types.HTTPSHealthCheck): + + This field is a member of `oneof`_ ``_https_health_check``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + Type of the resource. + + This field is a member of `oneof`_ ``_kind``. + log_config (google.cloud.compute_v1.types.HealthCheckLogConfig): + Configure logging on this health check. + + This field is a member of `oneof`_ ``_log_config``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. For example, a name that is 1-63 + characters long, matches the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?``, and otherwise complies with + RFC1035. This regular expression describes a name where the + first character is a lowercase letter, and all following + characters are a dash, lowercase letter, or digit, except + the last character, which isn't a dash. + + This field is a member of `oneof`_ ``_name``. + region (str): + [Output Only] Region where the health check resides. Not + applicable to global health checks. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + ssl_health_check (google.cloud.compute_v1.types.SSLHealthCheck): + + This field is a member of `oneof`_ ``_ssl_health_check``. + tcp_health_check (google.cloud.compute_v1.types.TCPHealthCheck): + + This field is a member of `oneof`_ ``_tcp_health_check``. + timeout_sec (int): + How long (in seconds) to wait before claiming + failure. The default value is 5 seconds. It is + invalid for timeoutSec to have greater value + than checkIntervalSec. 
+ + This field is a member of `oneof`_ ``_timeout_sec``. + type_ (str): + Specifies the type of the healthCheck, either + TCP, SSL, HTTP, HTTPS, HTTP2 or GRPC. Exactly + one of the protocol-specific health check fields + must be specified, which must match type field. + Check the Type enum for the list of possible + values. + + This field is a member of `oneof`_ ``_type``. + unhealthy_threshold (int): + A so-far healthy instance will be marked + unhealthy after this many consecutive failures. + The default value is 2. + + This field is a member of `oneof`_ ``_unhealthy_threshold``. + """ + class Type(proto.Enum): + r"""Specifies the type of the healthCheck, either TCP, SSL, HTTP, + HTTPS, HTTP2 or GRPC. Exactly one of the protocol-specific + health check fields must be specified, which must match type + field. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + GRPC (2196510): + No description available. + HTTP (2228360): + No description available. + HTTP2 (69079210): + No description available. + HTTPS (69079243): + No description available. + INVALID (530283991): + No description available. + SSL (82412): + No description available. + TCP (82881): + No description available. 
+ """ + UNDEFINED_TYPE = 0 + GRPC = 2196510 + HTTP = 2228360 + HTTP2 = 69079210 + HTTPS = 69079243 + INVALID = 530283991 + SSL = 82412 + TCP = 82881 + + check_interval_sec: int = proto.Field( + proto.INT32, + number=345561006, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + grpc_health_check: 'GRPCHealthCheck' = proto.Field( + proto.MESSAGE, + number=85529574, + optional=True, + message='GRPCHealthCheck', + ) + healthy_threshold: int = proto.Field( + proto.INT32, + number=403212361, + optional=True, + ) + http2_health_check: 'HTTP2HealthCheck' = proto.Field( + proto.MESSAGE, + number=11360986, + optional=True, + message='HTTP2HealthCheck', + ) + http_health_check: 'HTTPHealthCheck' = proto.Field( + proto.MESSAGE, + number=412586940, + optional=True, + message='HTTPHealthCheck', + ) + https_health_check: 'HTTPSHealthCheck' = proto.Field( + proto.MESSAGE, + number=436046905, + optional=True, + message='HTTPSHealthCheck', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + log_config: 'HealthCheckLogConfig' = proto.Field( + proto.MESSAGE, + number=351299741, + optional=True, + message='HealthCheckLogConfig', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + ssl_health_check: 'SSLHealthCheck' = proto.Field( + proto.MESSAGE, + number=280032440, + optional=True, + message='SSLHealthCheck', + ) + tcp_health_check: 'TCPHealthCheck' = proto.Field( + proto.MESSAGE, + number=469980419, + optional=True, + message='TCPHealthCheck', + ) + timeout_sec: int = proto.Field( + proto.INT32, + 
number=79994995, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + unhealthy_threshold: int = proto.Field( + proto.INT32, + number=227958480, + optional=True, + ) + + +class HealthCheckList(proto.Message): + r"""Contains a list of HealthCheck resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.HealthCheck]): + A list of HealthCheck resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['HealthCheck'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='HealthCheck', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class HealthCheckLogConfig(proto.Message): + r"""Configuration of logging on a health check. If logging is + enabled, logs will be exported to Stackdriver. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable (bool): + Indicates whether or not to export logs. This + is false by default, which means no health check + logging will be done. + + This field is a member of `oneof`_ ``_enable``. + """ + + enable: bool = proto.Field( + proto.BOOL, + number=311764355, + optional=True, + ) + + +class HealthCheckReference(proto.Message): + r"""A full or valid partial URL to a health check. For example, + the following are valid URLs: - + https://www.googleapis.com/compute/beta/projects/project-id/global/httpHealthChecks/health-check + - projects/project-id/global/httpHealthChecks/health-check - + global/httpHealthChecks/health-check + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + + This field is a member of `oneof`_ ``_health_check``. + """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + optional=True, + ) + + +class HealthCheckService(proto.Message): + r"""Represents a Health-Check as a Service resource. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a HealthCheckService. An + up-to-date fingerprint must be provided in order + to patch/update the HealthCheckService; + Otherwise, the request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve the + HealthCheckService. + + This field is a member of `oneof`_ ``_fingerprint``. + health_checks (MutableSequence[str]): + A list of URLs to the HealthCheck resources. Must have at + least one HealthCheck, and not more than 10 for regional + HealthCheckService, and not more than 1 for global + HealthCheckService. HealthCheck resources must have + portSpecification=USE_SERVING_PORT or + portSpecification=USE_FIXED_PORT. For regional + HealthCheckService, the HealthCheck must be regional and in + the same region. For global HealthCheckService, HealthCheck + must be global. Mix of regional and global HealthChecks is + not supported. Multiple regional HealthChecks must belong to + the same region. Regional HealthChecks must belong to the + same region as zones of NetworkEndpointGroups. For global + HealthCheckService using global INTERNET_IP_PORT + NetworkEndpointGroups, the global HealthChecks must specify + sourceRegions, and HealthChecks that specify sourceRegions + can only be used with global INTERNET_IP_PORT + NetworkEndpointGroups. + health_status_aggregation_policy (str): + Optional. 
Policy for how the results from multiple health + checks for the same endpoint are aggregated. Defaults to + NO_AGGREGATION if unspecified. - NO_AGGREGATION. An + EndpointHealth message is returned for each pair in the + health check service. - AND. If any health check of an + endpoint reports UNHEALTHY, then UNHEALTHY is the + HealthState of the endpoint. If all health checks report + HEALTHY, the HealthState of the endpoint is HEALTHY. . This + is only allowed with regional HealthCheckService. Check the + HealthStatusAggregationPolicy enum for the list of possible + values. + + This field is a member of `oneof`_ ``_health_status_aggregation_policy``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output only] Type of the resource. Always + compute#healthCheckServicefor health check services. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network_endpoint_groups (MutableSequence[str]): + A list of URLs to the NetworkEndpointGroup resources. Must + not have more than 100. For regional HealthCheckService, + NEGs must be in zones in the region of the + HealthCheckService. For global HealthCheckServices, the + NetworkEndpointGroups must be global INTERNET_IP_PORT. + notification_endpoints (MutableSequence[str]): + A list of URLs to the NotificationEndpoint + resources. Must not have more than 10. A list of + endpoints for receiving notifications of change + in health status. 
For regional + HealthCheckService, NotificationEndpoint must be + regional and in the same region. For global + HealthCheckService, NotificationEndpoint must be + global. + region (str): + [Output Only] URL of the region where the health check + service resides. This field is not applicable to global + health check services. You must specify this field as part + of the HTTP request URL. It is not settable as a field in + the request body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + """ + class HealthStatusAggregationPolicy(proto.Enum): + r"""Optional. Policy for how the results from multiple health checks for + the same endpoint are aggregated. Defaults to NO_AGGREGATION if + unspecified. - NO_AGGREGATION. An EndpointHealth message is returned + for each pair in the health check service. - AND. If any health + check of an endpoint reports UNHEALTHY, then UNHEALTHY is the + HealthState of the endpoint. If all health checks report HEALTHY, + the HealthState of the endpoint is HEALTHY. . This is only allowed + with regional HealthCheckService. + + Values: + UNDEFINED_HEALTH_STATUS_AGGREGATION_POLICY (0): + A value indicating that the enum field is not + set. + AND (64951): + If any backend's health check reports + UNHEALTHY, then UNHEALTHY is the HealthState of + the entire health check service. If all + backend's are healthy, the HealthState of the + health check service is HEALTHY. + NO_AGGREGATION (426445124): + An EndpointHealth message is returned for + each backend in the health check service. 
+ """ + UNDEFINED_HEALTH_STATUS_AGGREGATION_POLICY = 0 + AND = 64951 + NO_AGGREGATION = 426445124 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + health_checks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=448370606, + ) + health_status_aggregation_policy: str = proto.Field( + proto.STRING, + number=253163129, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network_endpoint_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29346733, + ) + notification_endpoints: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=406728490, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + + +class HealthCheckServiceReference(proto.Message): + r"""A full or valid partial URL to a health check service. For + example, the following are valid URLs: - + https://www.googleapis.com/compute/beta/projects/project-id/regions/us-west1/healthCheckServices/health-check-service + - + projects/project-id/regions/us-west1/healthCheckServices/health-check-service + - regions/us-west1/healthCheckServices/health-check-service + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check_service (str): + + This field is a member of `oneof`_ ``_health_check_service``. 
+ """ + + health_check_service: str = proto.Field( + proto.STRING, + number=408374747, + optional=True, + ) + + +class HealthCheckServicesList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.HealthCheckService]): + A list of HealthCheckService resources. + kind (str): + [Output Only] Type of the resource. Always + compute#healthCheckServicesList for lists of + HealthCheckServices. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['HealthCheckService'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='HealthCheckService', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class HealthChecksAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.HealthChecksScopedList]): + A list of HealthChecksScopedList resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'HealthChecksScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='HealthChecksScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class HealthChecksScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_checks (MutableSequence[google.cloud.compute_v1.types.HealthCheck]): + A list of HealthChecks contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + health_checks: MutableSequence['HealthCheck'] = proto.RepeatedField( + proto.MESSAGE, + number=448370606, + message='HealthCheck', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class HealthStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + annotations (MutableMapping[str, str]): + Metadata defined as annotations for network + endpoint. + forwarding_rule (str): + URL of the forwarding rule associated with + the health status of the instance. + + This field is a member of `oneof`_ ``_forwarding_rule``. 
+ forwarding_rule_ip (str): + A forwarding rule IP address assigned to this + instance. + + This field is a member of `oneof`_ ``_forwarding_rule_ip``. + health_state (str): + Health state of the IPv4 address of the + instance. Check the HealthState enum for the + list of possible values. + + This field is a member of `oneof`_ ``_health_state``. + instance (str): + URL of the instance resource. + + This field is a member of `oneof`_ ``_instance``. + ip_address (str): + For target pool based Network Load Balancing, + it indicates the forwarding rule's IP address + assigned to this instance. For other types of + load balancing, the field indicates VM internal + ip. + + This field is a member of `oneof`_ ``_ip_address``. + port (int): + The named port of the instance group, not + necessarily the port that is health-checked. + + This field is a member of `oneof`_ ``_port``. + weight (str): + + This field is a member of `oneof`_ ``_weight``. + weight_error (str): + Check the WeightError enum for the list of + possible values. + + This field is a member of `oneof`_ ``_weight_error``. + """ + class HealthState(proto.Enum): + r"""Health state of the IPv4 address of the instance. + + Values: + UNDEFINED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + HEALTHY (439801213): + No description available. + UNHEALTHY (462118084): + No description available. + """ + UNDEFINED_HEALTH_STATE = 0 + HEALTHY = 439801213 + UNHEALTHY = 462118084 + + class WeightError(proto.Enum): + r""" + + Values: + UNDEFINED_WEIGHT_ERROR (0): + A value indicating that the enum field is not + set. + INVALID_WEIGHT (383698400): + The response to a Health Check probe had the + HTTP response header field + X-Load-Balancing-Endpoint-Weight, but its + content was invalid (i.e., not a non-negative + single-precision floating-point number in + decimal string representation). 
+ MISSING_WEIGHT (384027537): + The response to a Health Check probe did not + have the HTTP response header field + X-Load-Balancing-Endpoint-Weight. + UNAVAILABLE_WEIGHT (439464295): + This is the value when the accompanied health + status is either TIMEOUT (i.e.,the Health Check + probe was not able to get a response in time) or + UNKNOWN. For the latter, it should be typically + because there has not been sufficient time to + parse and report the weight for a new backend + (which is with 0.0.0.0 ip address). However, it + can be also due to an outage case for which the + health status is explicitly reset to UNKNOWN. + WEIGHT_NONE (502428831): + This is the default value when WeightReportMode is DISABLE, + and is also the initial value when WeightReportMode has just + updated to ENABLE or DRY_RUN and there has not been + sufficient time to parse and report the backend weight. + """ + UNDEFINED_WEIGHT_ERROR = 0 + INVALID_WEIGHT = 383698400 + MISSING_WEIGHT = 384027537 + UNAVAILABLE_WEIGHT = 439464295 + WEIGHT_NONE = 502428831 + + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=112032548, + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + optional=True, + ) + forwarding_rule_ip: str = proto.Field( + proto.STRING, + number=172250632, + optional=True, + ) + health_state: str = proto.Field( + proto.STRING, + number=324007150, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=406272220, + optional=True, + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + weight: str = proto.Field( + proto.STRING, + number=282149496, + optional=True, + ) + weight_error: str = proto.Field( + proto.STRING, + number=522501505, + optional=True, + ) + + +class HealthStatusForNetworkEndpoint(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (google.cloud.compute_v1.types.BackendServiceReference): + URL of the backend service associated with + the health state of the network endpoint. + + This field is a member of `oneof`_ ``_backend_service``. + forwarding_rule (google.cloud.compute_v1.types.ForwardingRuleReference): + URL of the forwarding rule associated with + the health state of the network endpoint. + + This field is a member of `oneof`_ ``_forwarding_rule``. + health_check (google.cloud.compute_v1.types.HealthCheckReference): + URL of the health check associated with the + health state of the network endpoint. + + This field is a member of `oneof`_ ``_health_check``. + health_check_service (google.cloud.compute_v1.types.HealthCheckServiceReference): + URL of the health check service associated + with the health state of the network endpoint. + + This field is a member of `oneof`_ ``_health_check_service``. + health_state (str): + Health state of the network endpoint + determined based on the health checks + configured. Check the HealthState enum for the + list of possible values. + + This field is a member of `oneof`_ ``_health_state``. + """ + class HealthState(proto.Enum): + r"""Health state of the network endpoint determined based on the + health checks configured. + + Values: + UNDEFINED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + DRAINING (480455402): + Endpoint is being drained. + HEALTHY (439801213): + Endpoint is healthy. + UNHEALTHY (462118084): + Endpoint is unhealthy. + UNKNOWN (433141802): + Health status of the endpoint is unknown. 
+ """ + UNDEFINED_HEALTH_STATE = 0 + DRAINING = 480455402 + HEALTHY = 439801213 + UNHEALTHY = 462118084 + UNKNOWN = 433141802 + + backend_service: 'BackendServiceReference' = proto.Field( + proto.MESSAGE, + number=306946058, + optional=True, + message='BackendServiceReference', + ) + forwarding_rule: 'ForwardingRuleReference' = proto.Field( + proto.MESSAGE, + number=269964030, + optional=True, + message='ForwardingRuleReference', + ) + health_check: 'HealthCheckReference' = proto.Field( + proto.MESSAGE, + number=308876645, + optional=True, + message='HealthCheckReference', + ) + health_check_service: 'HealthCheckServiceReference' = proto.Field( + proto.MESSAGE, + number=408374747, + optional=True, + message='HealthCheckServiceReference', + ) + health_state: str = proto.Field( + proto.STRING, + number=324007150, + optional=True, + ) + + +class Help(proto.Message): + r"""Provides links to documentation or for performing an out of + band action. For example, if a quota check failed with an error + indicating the calling project hasn't enabled the accessed + service, this can contain a URL pointing directly to the right + place in the developer console to flip the bit. + + Attributes: + links (MutableSequence[google.cloud.compute_v1.types.HelpLink]): + URL(s) pointing to additional information on + handling the current error. + """ + + links: MutableSequence['HelpLink'] = proto.RepeatedField( + proto.MESSAGE, + number=102977465, + message='HelpLink', + ) + + +class HelpLink(proto.Message): + r"""Describes a URL link. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + Describes what the link offers. + + This field is a member of `oneof`_ ``_description``. + url (str): + The URL of the link. + + This field is a member of `oneof`_ ``_url``. 
+ """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + url: str = proto.Field( + proto.STRING, + number=116079, + optional=True, + ) + + +class HostRule(proto.Message): + r"""UrlMaps A host-matching rule for a URL. If matched, will use + the named PathMatcher to select the BackendService. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + hosts (MutableSequence[str]): + The list of host patterns to match. They must be valid + hostnames with optional port numbers in the format + host:port. \* matches any string of ([a-z0-9-.]*). In that + case, \* must be the first character, and if followed by + anything, the immediate following character must be either - + or .. \* based matching is not supported when the URL map is + bound to a target gRPC proxy that has the + validateForProxyless field set to true. + path_matcher (str): + The name of the PathMatcher to use to match + the path portion of the URL if the hostRule + matches the URL's host portion. + + This field is a member of `oneof`_ ``_path_matcher``. + """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + hosts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=99467211, + ) + path_matcher: str = proto.Field( + proto.STRING, + number=337813272, + optional=True, + ) + + +class HttpFaultAbort(proto.Message): + r"""Specification for how requests are aborted as part of fault + injection. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + http_status (int): + The HTTP status code used to abort the + request. The value must be from 200 to 599 + inclusive. 
For gRPC protocol, the gRPC status + code is mapped to HTTP status code according to + this mapping table. HTTP status 200 is mapped to + gRPC status UNKNOWN. Injecting an OK status is + currently not supported by Traffic Director. + + This field is a member of `oneof`_ ``_http_status``. + percentage (float): + The percentage of traffic for connections, + operations, or requests that is aborted as part + of fault injection. The value must be from 0.0 + to 100.0 inclusive. + + This field is a member of `oneof`_ ``_percentage``. + """ + + http_status: int = proto.Field( + proto.UINT32, + number=468949897, + optional=True, + ) + percentage: float = proto.Field( + proto.DOUBLE, + number=151909018, + optional=True, + ) + + +class HttpFaultDelay(proto.Message): + r"""Specifies the delay introduced by the load balancer before + forwarding the request to the backend service as part of fault + injection. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed_delay (google.cloud.compute_v1.types.Duration): + Specifies the value of the fixed delay + interval. + + This field is a member of `oneof`_ ``_fixed_delay``. + percentage (float): + The percentage of traffic for connections, + operations, or requests for which a delay is + introduced as part of fault injection. The value + must be from 0.0 to 100.0 inclusive. + + This field is a member of `oneof`_ ``_percentage``. + """ + + fixed_delay: 'Duration' = proto.Field( + proto.MESSAGE, + number=317037816, + optional=True, + message='Duration', + ) + percentage: float = proto.Field( + proto.DOUBLE, + number=151909018, + optional=True, + ) + + +class HttpFaultInjection(proto.Message): + r"""The specification for fault injection introduced into traffic + to test the resiliency of clients to backend service failure. 
As + part of fault injection, when clients send requests to a backend + service, delays can be introduced by the load balancer on a + percentage of requests before sending those request to the + backend service. Similarly requests from clients can be aborted + by the load balancer for a percentage of requests. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + abort (google.cloud.compute_v1.types.HttpFaultAbort): + The specification for how client requests are + aborted as part of fault injection. + + This field is a member of `oneof`_ ``_abort``. + delay (google.cloud.compute_v1.types.HttpFaultDelay): + The specification for how client requests are + delayed as part of fault injection, before being + sent to a backend service. + + This field is a member of `oneof`_ ``_delay``. + """ + + abort: 'HttpFaultAbort' = proto.Field( + proto.MESSAGE, + number=92611376, + optional=True, + message='HttpFaultAbort', + ) + delay: 'HttpFaultDelay' = proto.Field( + proto.MESSAGE, + number=95467907, + optional=True, + message='HttpFaultDelay', + ) + + +class HttpHeaderAction(proto.Message): + r"""The request and response header transformations that take + effect before the request is passed along to the selected + backendService. + + Attributes: + request_headers_to_add (MutableSequence[google.cloud.compute_v1.types.HttpHeaderOption]): + Headers to add to a matching request before + forwarding the request to the backendService. + request_headers_to_remove (MutableSequence[str]): + A list of header names for headers that need + to be removed from the request before forwarding + the request to the backendService. + response_headers_to_add (MutableSequence[google.cloud.compute_v1.types.HttpHeaderOption]): + Headers to add the response before sending + the response back to the client. 
+ response_headers_to_remove (MutableSequence[str]): + A list of header names for headers that need + to be removed from the response before sending + the response back to the client. + """ + + request_headers_to_add: MutableSequence['HttpHeaderOption'] = proto.RepeatedField( + proto.MESSAGE, + number=72111974, + message='HttpHeaderOption', + ) + request_headers_to_remove: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=218425247, + ) + response_headers_to_add: MutableSequence['HttpHeaderOption'] = proto.RepeatedField( + proto.MESSAGE, + number=32136052, + message='HttpHeaderOption', + ) + response_headers_to_remove: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=75415761, + ) + + +class HttpHeaderMatch(proto.Message): + r"""matchRule criteria for request header matches. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + exact_match (str): + The value should exactly match contents of + exactMatch. Only one of exactMatch, prefixMatch, + suffixMatch, regexMatch, presentMatch or + rangeMatch must be set. + + This field is a member of `oneof`_ ``_exact_match``. + header_name (str): + The name of the HTTP header to match. For matching against + the HTTP request's authority, use a headerMatch with the + header name ":authority". For matching a request's method, + use the headerName ":method". When the URL map is bound to a + target gRPC proxy that has the validateForProxyless field + set to true, only non-binary user-specified custom metadata + and the ``content-type`` header are supported. The following + transport-level headers cannot be used in header matching + rules: ``:authority``, ``:method``, ``:path``, ``:scheme``, + ``user-agent``, ``accept-encoding``, ``content-encoding``, + ``grpc-accept-encoding``, ``grpc-encoding``, + ``grpc-previous-rpc-attempts``, ``grpc-tags-bin``, + ``grpc-timeout`` and ``grpc-trace-bin``. 
+ + This field is a member of `oneof`_ ``_header_name``. + invert_match (bool): + If set to false, the headerMatch is + considered a match if the preceding match + criteria are met. If set to true, the + headerMatch is considered a match if the + preceding match criteria are NOT met. The + default setting is false. + + This field is a member of `oneof`_ ``_invert_match``. + prefix_match (str): + The value of the header must start with the + contents of prefixMatch. Only one of exactMatch, + prefixMatch, suffixMatch, regexMatch, + presentMatch or rangeMatch must be set. + + This field is a member of `oneof`_ ``_prefix_match``. + present_match (bool): + A header with the contents of headerName must + exist. The match takes place whether or not the + request's header has a value. Only one of + exactMatch, prefixMatch, suffixMatch, + regexMatch, presentMatch or rangeMatch must be + set. + + This field is a member of `oneof`_ ``_present_match``. + range_match (google.cloud.compute_v1.types.Int64RangeMatch): + The header value must be an integer and its value must be in + the range specified in rangeMatch. If the header does not + contain an integer, number or is empty, the match fails. For + example for a range [-5, 0] - -3 will match. - 0 will not + match. - 0.25 will not match. - -3someString will not match. + Only one of exactMatch, prefixMatch, suffixMatch, + regexMatch, presentMatch or rangeMatch must be set. + rangeMatch is not supported for load balancers that have + loadBalancingScheme set to EXTERNAL. + + This field is a member of `oneof`_ ``_range_match``. + regex_match (str): + The value of the header must match the regular expression + specified in regexMatch. For more information about regular + expression syntax, see Syntax. For matching against a port + specified in the HTTP request, use a headerMatch with + headerName set to PORT and a regular expression that + satisfies the RFC2616 Host header's port specifier. 
Only one + of exactMatch, prefixMatch, suffixMatch, regexMatch, + presentMatch or rangeMatch must be set. Regular expressions + can only be used when the loadBalancingScheme is set to + INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_regex_match``. + suffix_match (str): + The value of the header must end with the + contents of suffixMatch. Only one of exactMatch, + prefixMatch, suffixMatch, regexMatch, + presentMatch or rangeMatch must be set. + + This field is a member of `oneof`_ ``_suffix_match``. + """ + + exact_match: str = proto.Field( + proto.STRING, + number=457641093, + optional=True, + ) + header_name: str = proto.Field( + proto.STRING, + number=110223613, + optional=True, + ) + invert_match: bool = proto.Field( + proto.BOOL, + number=501130268, + optional=True, + ) + prefix_match: str = proto.Field( + proto.STRING, + number=257898968, + optional=True, + ) + present_match: bool = proto.Field( + proto.BOOL, + number=67435841, + optional=True, + ) + range_match: 'Int64RangeMatch' = proto.Field( + proto.MESSAGE, + number=97244227, + optional=True, + message='Int64RangeMatch', + ) + regex_match: str = proto.Field( + proto.STRING, + number=107387853, + optional=True, + ) + suffix_match: str = proto.Field( + proto.STRING, + number=426488663, + optional=True, + ) + + +class HttpHeaderOption(proto.Message): + r"""Specification determining how headers are added to requests + or responses. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + header_name (str): + The name of the header. + + This field is a member of `oneof`_ ``_header_name``. + header_value (str): + The value of the header to add. + + This field is a member of `oneof`_ ``_header_value``. + replace (bool): + If false, headerValue is appended to any + values that already exist for the header. If + true, headerValue is set for the header, + discarding any values that were set for that + header. 
The default value is false. + + This field is a member of `oneof`_ ``_replace``. + """ + + header_name: str = proto.Field( + proto.STRING, + number=110223613, + optional=True, + ) + header_value: str = proto.Field( + proto.STRING, + number=203094335, + optional=True, + ) + replace: bool = proto.Field( + proto.BOOL, + number=20755124, + optional=True, + ) + + +class HttpQueryParameterMatch(proto.Message): + r"""HttpRouteRuleMatch criteria for a request's query parameter. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + exact_match (str): + The queryParameterMatch matches if the value + of the parameter exactly matches the contents of + exactMatch. Only one of presentMatch, + exactMatch, or regexMatch must be set. + + This field is a member of `oneof`_ ``_exact_match``. + name (str): + The name of the query parameter to match. The + query parameter must exist in the request, in + the absence of which the request match fails. + + This field is a member of `oneof`_ ``_name``. + present_match (bool): + Specifies that the queryParameterMatch + matches if the request contains the query + parameter, irrespective of whether the parameter + has a value or not. Only one of presentMatch, + exactMatch, or regexMatch must be set. + + This field is a member of `oneof`_ ``_present_match``. + regex_match (str): + The queryParameterMatch matches if the value of the + parameter matches the regular expression specified by + regexMatch. For more information about regular expression + syntax, see Syntax. Only one of presentMatch, exactMatch, or + regexMatch must be set. Regular expressions can only be used + when the loadBalancingScheme is set to + INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_regex_match``. 
+ """ + + exact_match: str = proto.Field( + proto.STRING, + number=457641093, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + present_match: bool = proto.Field( + proto.BOOL, + number=67435841, + optional=True, + ) + regex_match: str = proto.Field( + proto.STRING, + number=107387853, + optional=True, + ) + + +class HttpRedirectAction(proto.Message): + r"""Specifies settings for an HTTP redirect. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host_redirect (str): + The host that is used in the redirect + response instead of the one that was supplied in + the request. The value must be from 1 to 255 + characters. + + This field is a member of `oneof`_ ``_host_redirect``. + https_redirect (bool): + If set to true, the URL scheme in the + redirected request is set to HTTPS. If set to + false, the URL scheme of the redirected request + remains the same as that of the request. This + must only be set for URL maps used in + TargetHttpProxys. Setting this true for + TargetHttpsProxy is not permitted. The default + is set to false. + + This field is a member of `oneof`_ ``_https_redirect``. + path_redirect (str): + The path that is used in the redirect + response instead of the one that was supplied in + the request. pathRedirect cannot be supplied + together with prefixRedirect. Supply one alone + or neither. If neither is supplied, the path of + the original request is used for the redirect. + The value must be from 1 to 1024 characters. + + This field is a member of `oneof`_ ``_path_redirect``. + prefix_redirect (str): + The prefix that replaces the prefixMatch + specified in the HttpRouteRuleMatch, retaining + the remaining portion of the URL before + redirecting the request. prefixRedirect cannot + be supplied together with pathRedirect. Supply + one alone or neither. 
If neither is supplied, + the path of the original request is used for the + redirect. The value must be from 1 to 1024 + characters. + + This field is a member of `oneof`_ ``_prefix_redirect``. + redirect_response_code (str): + The HTTP Status code to use for this RedirectAction. + Supported values are: - MOVED_PERMANENTLY_DEFAULT, which is + the default value and corresponds to 301. - FOUND, which + corresponds to 302. - SEE_OTHER which corresponds to 303. - + TEMPORARY_REDIRECT, which corresponds to 307. In this case, + the request method is retained. - PERMANENT_REDIRECT, which + corresponds to 308. In this case, the request method is + retained. Check the RedirectResponseCode enum for the list + of possible values. + + This field is a member of `oneof`_ ``_redirect_response_code``. + strip_query (bool): + If set to true, any accompanying query + portion of the original URL is removed before + redirecting the request. If set to false, the + query portion of the original URL is retained. + The default is set to false. + + This field is a member of `oneof`_ ``_strip_query``. + """ + class RedirectResponseCode(proto.Enum): + r"""The HTTP Status code to use for this RedirectAction. Supported + values are: - MOVED_PERMANENTLY_DEFAULT, which is the default value + and corresponds to 301. - FOUND, which corresponds to 302. - + SEE_OTHER which corresponds to 303. - TEMPORARY_REDIRECT, which + corresponds to 307. In this case, the request method is retained. - + PERMANENT_REDIRECT, which corresponds to 308. In this case, the + request method is retained. + + Values: + UNDEFINED_REDIRECT_RESPONSE_CODE (0): + A value indicating that the enum field is not + set. + FOUND (67084130): + Http Status Code 302 - Found. + MOVED_PERMANENTLY_DEFAULT (386698449): + Http Status Code 301 - Moved Permanently. + PERMANENT_REDIRECT (382006381): + Http Status Code 308 - Permanent Redirect + maintaining HTTP method. + SEE_OTHER (445380580): + Http Status Code 303 - See Other. 
+ TEMPORARY_REDIRECT (489550378): + Http Status Code 307 - Temporary Redirect + maintaining HTTP method. + """ + UNDEFINED_REDIRECT_RESPONSE_CODE = 0 + FOUND = 67084130 + MOVED_PERMANENTLY_DEFAULT = 386698449 + PERMANENT_REDIRECT = 382006381 + SEE_OTHER = 445380580 + TEMPORARY_REDIRECT = 489550378 + + host_redirect: str = proto.Field( + proto.STRING, + number=107417747, + optional=True, + ) + https_redirect: bool = proto.Field( + proto.BOOL, + number=170260656, + optional=True, + ) + path_redirect: str = proto.Field( + proto.STRING, + number=272342710, + optional=True, + ) + prefix_redirect: str = proto.Field( + proto.STRING, + number=446184169, + optional=True, + ) + redirect_response_code: str = proto.Field( + proto.STRING, + number=436710408, + optional=True, + ) + strip_query: bool = proto.Field( + proto.BOOL, + number=52284641, + optional=True, + ) + + +class HttpRetryPolicy(proto.Message): + r"""The retry policy associates with HttpRouteRule + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + num_retries (int): + Specifies the allowed number retries. This + number must be > 0. If not specified, defaults + to 1. + + This field is a member of `oneof`_ ``_num_retries``. + per_try_timeout (google.cloud.compute_v1.types.Duration): + Specifies a non-zero timeout per retry + attempt. If not specified, will use the timeout + set in the HttpRouteAction field. If timeout in + the HttpRouteAction field is not set, this field + uses the largest timeout among all backend + services associated with the route. Not + supported when the URL map is bound to a target + gRPC proxy that has the validateForProxyless + field set to true. + + This field is a member of `oneof`_ ``_per_try_timeout``. + retry_conditions (MutableSequence[str]): + Specifies one or more conditions when this retry policy + applies. 
Valid values are: - 5xx: retry is attempted if the + instance or endpoint responds with any 5xx response code, or + if the instance or endpoint does not respond at all. For + example, disconnects, reset, read timeout, connection + failure, and refused streams. - gateway-error: Similar to + 5xx, but only applies to response codes 502, 503 or 504. - + connect-failure: a retry is attempted on failures connecting + to the instance or endpoint. For example, connection + timeouts. - retriable-4xx: a retry is attempted if the + instance or endpoint responds with a 4xx response code. The + only error that you can retry is error code 409. - + refused-stream: a retry is attempted if the instance or + endpoint resets the stream with a REFUSED_STREAM error code. + This reset type indicates that it is safe to retry. - + cancelled: a retry is attempted if the gRPC status code in + the response header is set to cancelled. - + deadline-exceeded: a retry is attempted if the gRPC status + code in the response header is set to deadline-exceeded. - + internal: a retry is attempted if the gRPC status code in + the response header is set to internal. - + resource-exhausted: a retry is attempted if the gRPC status + code in the response header is set to resource-exhausted. - + unavailable: a retry is attempted if the gRPC status code in + the response header is set to unavailable. Only the + following codes are supported when the URL map is bound to + target gRPC proxy that has validateForProxyless field set to + true. - cancelled - deadline-exceeded - internal - + resource-exhausted - unavailable + """ + + num_retries: int = proto.Field( + proto.UINT32, + number=251680141, + optional=True, + ) + per_try_timeout: 'Duration' = proto.Field( + proto.MESSAGE, + number=280041147, + optional=True, + message='Duration', + ) + retry_conditions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28815535, + ) + + +class HttpRouteAction(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cors_policy (google.cloud.compute_v1.types.CorsPolicy): + The specification for allowing client-side + cross-origin requests. For more information + about the W3C recommendation for cross-origin + resource sharing (CORS), see Fetch API Living + Standard. Not supported when the URL map is + bound to a target gRPC proxy. + + This field is a member of `oneof`_ ``_cors_policy``. + fault_injection_policy (google.cloud.compute_v1.types.HttpFaultInjection): + The specification for fault injection introduced into + traffic to test the resiliency of clients to backend service + failure. As part of fault injection, when clients send + requests to a backend service, delays can be introduced by a + load balancer on a percentage of requests before sending + those requests to the backend service. Similarly requests + from clients can be aborted by the load balancer for a + percentage of requests. timeout and retry_policy is ignored + by clients that are configured with a fault_injection_policy + if: 1. The traffic is generated by fault injection AND 2. + The fault injection is not a delay fault injection. Fault + injection is not supported with the global external HTTP(S) + load balancer (classic). To see which load balancers support + fault injection, see Load balancing: Routing and traffic + management features. + + This field is a member of `oneof`_ ``_fault_injection_policy``. + max_stream_duration (google.cloud.compute_v1.types.Duration): + Specifies the maximum duration (timeout) for streams on the + selected route. Unlike the timeout field where the timeout + duration starts from the time the request has been fully + processed (known as *end-of-stream*), the duration in this + field is computed from the beginning of the stream until the + response has been processed, including all retries. A stream + that does not complete in this duration is closed. 
If not + specified, this field uses the maximum maxStreamDuration + value among all backend services associated with the route. + This field is only allowed if the Url map is used with + backend services with loadBalancingScheme set to + INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_max_stream_duration``. + request_mirror_policy (google.cloud.compute_v1.types.RequestMirrorPolicy): + Specifies the policy on how requests intended + for the route's backends are shadowed to a + separate mirrored backend service. The load + balancer does not wait for responses from the + shadow service. Before sending traffic to the + shadow service, the host / authority header is + suffixed with -shadow. Not supported when the + URL map is bound to a target gRPC proxy that has + the validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_request_mirror_policy``. + retry_policy (google.cloud.compute_v1.types.HttpRetryPolicy): + Specifies the retry policy associated with + this route. + + This field is a member of `oneof`_ ``_retry_policy``. + timeout (google.cloud.compute_v1.types.Duration): + Specifies the timeout for the selected route. Timeout is + computed from the time the request has been fully processed + (known as *end-of-stream*) up until the response has been + processed. Timeout includes all retries. If not specified, + this field uses the largest timeout among all backend + services associated with the route. Not supported when the + URL map is bound to a target gRPC proxy that has + validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_timeout``. + url_rewrite (google.cloud.compute_v1.types.UrlRewrite): + The spec to modify the URL of the request, + before forwarding the request to the matched + service. urlRewrite is the only action supported + in UrlMaps for external HTTP(S) load balancers. 
+ Not supported when the URL map is bound to a + target gRPC proxy that has the + validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_url_rewrite``. + weighted_backend_services (MutableSequence[google.cloud.compute_v1.types.WeightedBackendService]): + A list of weighted backend services to send + traffic to when a route match occurs. The + weights determine the fraction of traffic that + flows to their corresponding backend service. If + all traffic needs to go to a single backend + service, there must be one + weightedBackendService with weight set to a + non-zero number. After a backend service is + identified and before forwarding the request to + the backend service, advanced routing actions + such as URL rewrites and header transformations + are applied depending on additional settings + specified in this HttpRouteAction. + """ + + cors_policy: 'CorsPolicy' = proto.Field( + proto.MESSAGE, + number=398943748, + optional=True, + message='CorsPolicy', + ) + fault_injection_policy: 'HttpFaultInjection' = proto.Field( + proto.MESSAGE, + number=412781079, + optional=True, + message='HttpFaultInjection', + ) + max_stream_duration: 'Duration' = proto.Field( + proto.MESSAGE, + number=61428376, + optional=True, + message='Duration', + ) + request_mirror_policy: 'RequestMirrorPolicy' = proto.Field( + proto.MESSAGE, + number=220196866, + optional=True, + message='RequestMirrorPolicy', + ) + retry_policy: 'HttpRetryPolicy' = proto.Field( + proto.MESSAGE, + number=56799913, + optional=True, + message='HttpRetryPolicy', + ) + timeout: 'Duration' = proto.Field( + proto.MESSAGE, + number=296701281, + optional=True, + message='Duration', + ) + url_rewrite: 'UrlRewrite' = proto.Field( + proto.MESSAGE, + number=273333948, + optional=True, + message='UrlRewrite', + ) + weighted_backend_services: MutableSequence['WeightedBackendService'] = proto.RepeatedField( + proto.MESSAGE, + number=337028049, + message='WeightedBackendService', + ) + + +class 
HttpRouteRule(proto.Message): + r"""The HttpRouteRule setting specifies how to match an HTTP + request and the corresponding routing action that load balancing + proxies perform. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + The short description conveying the intent of + this routeRule. The description can have a + maximum length of 1024 characters. + + This field is a member of `oneof`_ ``_description``. + header_action (google.cloud.compute_v1.types.HttpHeaderAction): + Specifies changes to request and response headers that need + to take effect for the selected backendService. The + headerAction value specified here is applied before the + matching pathMatchers[].headerAction and after + pathMatchers[].routeRules[].routeAction.weightedBackendService.backendServiceWeightAction[].headerAction + HeaderAction is not supported for load balancers that have + their loadBalancingScheme set to EXTERNAL. Not supported + when the URL map is bound to a target gRPC proxy that has + validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_header_action``. + match_rules (MutableSequence[google.cloud.compute_v1.types.HttpRouteRuleMatch]): + The list of criteria for matching attributes + of a request to this routeRule. This list has OR + semantics: the request matches this routeRule + when any of the matchRules are satisfied. + However predicates within a given matchRule have + AND semantics. All predicates within a matchRule + must match for the request to match the rule. + priority (int): + For routeRules within a given pathMatcher, + priority determines the order in which a load + balancer interprets routeRules. RouteRules are + evaluated in order of priority, from the lowest + to highest number. The priority of a rule + decreases as its number increases (1, 2, 3, + N+1). The first rule that matches the request is + applied. 
You cannot configure two or more + routeRules with the same priority. Priority for + each rule must be set to a number from 0 to + 2147483647 inclusive. Priority numbers can have + gaps, which enable you to add or remove rules in + the future without affecting the rest of the + rules. For example, 1, 2, 3, 4, 5, 9, 12, 16 is + a valid series of priority numbers to which you + could add rules numbered from 6 to 8, 10 to 11, + and 13 to 15 in the future without any impact on + existing rules. + + This field is a member of `oneof`_ ``_priority``. + route_action (google.cloud.compute_v1.types.HttpRouteAction): + In response to a matching matchRule, the load + balancer performs advanced routing actions, such + as URL rewrites and header transformations, + before forwarding the request to the selected + backend. If routeAction specifies any + weightedBackendServices, service must not be + set. Conversely if service is set, routeAction + cannot contain any weightedBackendServices. Only + one of urlRedirect, service or + routeAction.weightedBackendService must be set. + URL maps for Classic external HTTP(S) load + balancers only support the urlRewrite action + within a route rule's routeAction. + + This field is a member of `oneof`_ ``_route_action``. + service (str): + The full or partial URL of the backend + service resource to which traffic is directed if + this rule is matched. If routeAction is also + specified, advanced routing actions, such as URL + rewrites, take effect before sending the request + to the backend. However, if service is + specified, routeAction cannot contain any + weightedBackendServices. Conversely, if + routeAction specifies any + weightedBackendServices, service must not be + specified. Only one of urlRedirect, service or + routeAction.weightedBackendService must be set. + + This field is a member of `oneof`_ ``_service``. 
+ url_redirect (google.cloud.compute_v1.types.HttpRedirectAction): + When this rule is matched, the request is + redirected to a URL specified by urlRedirect. If + urlRedirect is specified, service or routeAction + must not be set. Not supported when the URL map + is bound to a target gRPC proxy. + + This field is a member of `oneof`_ ``_url_redirect``. + """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + header_action: 'HttpHeaderAction' = proto.Field( + proto.MESSAGE, + number=328077352, + optional=True, + message='HttpHeaderAction', + ) + match_rules: MutableSequence['HttpRouteRuleMatch'] = proto.RepeatedField( + proto.MESSAGE, + number=376200701, + message='HttpRouteRuleMatch', + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + route_action: 'HttpRouteAction' = proto.Field( + proto.MESSAGE, + number=424563948, + optional=True, + message='HttpRouteAction', + ) + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + url_redirect: 'HttpRedirectAction' = proto.Field( + proto.MESSAGE, + number=405147820, + optional=True, + message='HttpRedirectAction', + ) + + +class HttpRouteRuleMatch(proto.Message): + r"""HttpRouteRuleMatch specifies a set of criteria for matching + requests to an HttpRouteRule. All specified criteria must be + satisfied for a match to occur. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + full_path_match (str): + For satisfying the matchRule condition, the + path of the request must exactly match the value + specified in fullPathMatch after removing any + query parameters and anchor that may be part of + the original URL. fullPathMatch must be from 1 + to 1024 characters. Only one of prefixMatch, + fullPathMatch or regexMatch must be specified. + + This field is a member of `oneof`_ ``_full_path_match``. 
+ header_matches (MutableSequence[google.cloud.compute_v1.types.HttpHeaderMatch]): + Specifies a list of header match criteria, + all of which must match corresponding headers in + the request. + ignore_case (bool): + Specifies that prefixMatch and fullPathMatch + matches are case sensitive. The default value is + false. ignoreCase must not be used with + regexMatch. Not supported when the URL map is + bound to a target gRPC proxy. + + This field is a member of `oneof`_ ``_ignore_case``. + metadata_filters (MutableSequence[google.cloud.compute_v1.types.MetadataFilter]): + Opaque filter criteria used by the load balancer to restrict + routing configuration to a limited set of xDS compliant + clients. In their xDS requests to the load balancer, xDS + clients present node metadata. When there is a match, the + relevant routing configuration is made available to those + proxies. For each metadataFilter in this list, if its + filterMatchCriteria is set to MATCH_ANY, at least one of the + filterLabels must match the corresponding label provided in + the metadata. If its filterMatchCriteria is set to + MATCH_ALL, then all of its filterLabels must match with + corresponding labels provided in the metadata. If multiple + metadata filters are specified, all of them need to be + satisfied in order to be considered a match. metadataFilters + specified here is applied after those specified in + ForwardingRule that refers to the UrlMap this + HttpRouteRuleMatch belongs to. metadataFilters only applies + to load balancers that have loadBalancingScheme set to + INTERNAL_SELF_MANAGED. Not supported when the URL map is + bound to a target gRPC proxy that has validateForProxyless + field set to true. + path_template_match (str): + If specified, the route is a pattern match expression that + must match the :path header once the query string is + removed. 
A pattern match is subject to the following constraints: - The value + must be between 1 and 1024 characters - The pattern must + start with a leading slash ("/") - There may be no more than + 5 operators in the pattern. Precisely one of prefix_match, + full_path_match, regex_match or path_template_match must be + set. + + This field is a member of `oneof`_ ``_path_template_match``. + prefix_match (str): + For satisfying the matchRule condition, the + request's path must begin with the specified + prefixMatch. prefixMatch must begin with a /. + The value must be from 1 to 1024 characters. + Only one of prefixMatch, fullPathMatch or + regexMatch must be specified. + + This field is a member of `oneof`_ ``_prefix_match``. + query_parameter_matches (MutableSequence[google.cloud.compute_v1.types.HttpQueryParameterMatch]): + Specifies a list of query parameter match + criteria, all of which must match corresponding + query parameters in the request. Not supported + when the URL map is bound to a target gRPC + proxy. + regex_match (str): + For satisfying the matchRule condition, the path of the + request must satisfy the regular expression specified in + regexMatch after removing any query parameters and anchor + supplied with the original URL. For more information about + regular expression syntax, see Syntax. Only one of + prefixMatch, fullPathMatch or regexMatch must be specified. + Regular expressions can only be used when the + loadBalancingScheme is set to INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_regex_match``. 
+ """ + + full_path_match: str = proto.Field( + proto.STRING, + number=214598875, + optional=True, + ) + header_matches: MutableSequence['HttpHeaderMatch'] = proto.RepeatedField( + proto.MESSAGE, + number=361903489, + message='HttpHeaderMatch', + ) + ignore_case: bool = proto.Field( + proto.BOOL, + number=464324989, + optional=True, + ) + metadata_filters: MutableSequence['MetadataFilter'] = proto.RepeatedField( + proto.MESSAGE, + number=464725739, + message='MetadataFilter', + ) + path_template_match: str = proto.Field( + proto.STRING, + number=292348186, + optional=True, + ) + prefix_match: str = proto.Field( + proto.STRING, + number=257898968, + optional=True, + ) + query_parameter_matches: MutableSequence['HttpQueryParameterMatch'] = proto.RepeatedField( + proto.MESSAGE, + number=286231270, + message='HttpQueryParameterMatch', + ) + regex_match: str = proto.Field( + proto.STRING, + number=107387853, + optional=True, + ) + + +class Image(proto.Message): + r"""Represents an Image resource. You can use images to create + boot disks for your VM instances. For more information, read + Images. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + architecture (str): + The architecture of the image. Valid values are ARM64 or + X86_64. Check the Architecture enum for the list of possible + values. + + This field is a member of `oneof`_ ``_architecture``. + archive_size_bytes (int): + Size of the image tar.gz archive stored in + Google Cloud Storage (in bytes). + + This field is a member of `oneof`_ ``_archive_size_bytes``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + deprecated (google.cloud.compute_v1.types.DeprecationStatus): + The deprecation status associated with this + image. + + This field is a member of `oneof`_ ``_deprecated``. 
+ description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + disk_size_gb (int): + Size of the image when restored onto a + persistent disk (in GB). + + This field is a member of `oneof`_ ``_disk_size_gb``. + family (str): + The name of the image family to which this + image belongs. The image family name can be from + a publicly managed image family provided by + Compute Engine, or from a custom image family + you create. For example, centos-stream-9 is a + publicly available image family. For more + information, see Image family best practices. + When creating disks, you can specify an image + family instead of a specific image name. The + image family always returns its latest image + that is not deprecated. The name of the image + family must comply with RFC1035. + + This field is a member of `oneof`_ ``_family``. + guest_os_features (MutableSequence[google.cloud.compute_v1.types.GuestOsFeature]): + A list of features to enable on the guest operating system. + Applicable only for bootable images. To see a list of + available options, see the guestOSfeatures[].type parameter. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + Encrypts the image using a customer-supplied + encryption key. After you encrypt an image with + a customer-supplied key, you must provide the + same key if you use the image later (e.g. to + create a disk from the image). Customer-supplied + encryption keys do not protect access to + metadata of the disk. If you do not provide an + encryption key when creating the image, then the + disk will be encrypted using an automatically + generated key and you do not need to provide a + key to use the image later. 
+ + This field is a member of `oneof`_ ``_image_encryption_key``. + kind (str): + [Output Only] Type of the resource. Always compute#image for + images. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this image, which is essentially a hash of the + labels used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash in order to update + or change labels, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make a get() request to + retrieve an image. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels to apply to this image. These can be + later modified by the setLabels method. + license_codes (MutableSequence[int]): + Integer license codes indicating which + licenses are attached to this image. + licenses (MutableSequence[str]): + Any applicable license URI. + name (str): + Name of the resource; provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + raw_disk (google.cloud.compute_v1.types.RawDisk): + The parameters of the raw disk image. + + This field is a member of `oneof`_ ``_raw_disk``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + self_link (str): + [Output Only] Server-defined URL for the resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + shielded_instance_initial_state (google.cloud.compute_v1.types.InitialStateConfig): + Set the secure boot keys of shielded + instance. + + This field is a member of `oneof`_ ``_shielded_instance_initial_state``. + source_disk (str): + URL of the source disk used to create this + image. For example, the following are valid + values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /disks/disk - + projects/project/zones/zone/disks/disk - + zones/zone/disks/disk In order to create an + image, you must provide the full or partial URL + of one of the following: - The rawDisk.source + URL - The sourceDisk URL - The sourceImage URL - + The sourceSnapshot URL + + This field is a member of `oneof`_ ``_source_disk``. + source_disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source disk. Required if the source disk is + protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_disk_encryption_key``. + source_disk_id (str): + [Output Only] The ID value of the disk used to create this + image. This value may be used to determine whether the image + was taken from the current or a previous instance of a given + disk name. + + This field is a member of `oneof`_ ``_source_disk_id``. + source_image (str): + URL of the source image used to create this image. The + following are valid formats for the URL: - + https://www.googleapis.com/compute/v1/projects/project_id/global/ + images/image_name - + projects/project_id/global/images/image_name In order to + create an image, you must provide the full or partial URL of + one of the following: - The rawDisk.source URL - The + sourceDisk URL - The sourceImage URL - The sourceSnapshot + URL + + This field is a member of `oneof`_ ``_source_image``. 
+ source_image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source image. Required if the source image is + protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_image_encryption_key``. + source_image_id (str): + [Output Only] The ID value of the image used to create this + image. This value may be used to determine whether the image + was taken from the current or a previous instance of a given + image name. + + This field is a member of `oneof`_ ``_source_image_id``. + source_snapshot (str): + URL of the source snapshot used to create this image. The + following are valid formats for the URL: - + https://www.googleapis.com/compute/v1/projects/project_id/global/ + snapshots/snapshot_name - + projects/project_id/global/snapshots/snapshot_name In order + to create an image, you must provide the full or partial URL + of one of the following: - The rawDisk.source URL - The + sourceDisk URL - The sourceImage URL - The sourceSnapshot + URL + + This field is a member of `oneof`_ ``_source_snapshot``. + source_snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source snapshot. Required if the source snapshot + is protected by a customer-supplied encryption + key. + + This field is a member of `oneof`_ ``_source_snapshot_encryption_key``. + source_snapshot_id (str): + [Output Only] The ID value of the snapshot used to create + this image. This value may be used to determine whether the + snapshot was taken from the current or a previous instance + of a given snapshot name. + + This field is a member of `oneof`_ ``_source_snapshot_id``. + source_type (str): + The type of the image used to create this + disk. The default and only valid value is RAW. + Check the SourceType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_source_type``. 
+ status (str): + [Output Only] The status of the image. An image can be used + to create other resources, such as instances, only after the + image has been successfully created and the status is set to + READY. Possible values are FAILED, PENDING, or READY. Check + the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + storage_locations (MutableSequence[str]): + Cloud Storage bucket storage location of the + image (regional or multi-regional). + """ + class Architecture(proto.Enum): + r"""The architecture of the image. Valid values are ARM64 or X86_64. + + Values: + UNDEFINED_ARCHITECTURE (0): + A value indicating that the enum field is not + set. + ARCHITECTURE_UNSPECIFIED (394750507): + Default value indicating Architecture is not + set. + ARM64 (62547450): + Machines with architecture ARM64 + X86_64 (425300551): + Machines with architecture X86_64 + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + + class SourceType(proto.Enum): + r"""The type of the image used to create this disk. The default + and only valid value is RAW. + + Values: + UNDEFINED_SOURCE_TYPE (0): + A value indicating that the enum field is not + set. + RAW (80904): + No description available. + """ + UNDEFINED_SOURCE_TYPE = 0 + RAW = 80904 + + class Status(proto.Enum): + r"""[Output Only] The status of the image. An image can be used to + create other resources, such as instances, only after the image has + been successfully created and the status is set to READY. Possible + values are FAILED, PENDING, or READY. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + DELETING (528602024): + Image is deleting. + FAILED (455706685): + Image creation failed due to an error. + PENDING (35394935): + Image hasn't been created as yet. + READY (77848963): + Image has been successfully created. 
+ """ + UNDEFINED_STATUS = 0 + DELETING = 528602024 + FAILED = 455706685 + PENDING = 35394935 + READY = 77848963 + + architecture: str = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) + archive_size_bytes: int = proto.Field( + proto.INT64, + number=381093450, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + deprecated: 'DeprecationStatus' = proto.Field( + proto.MESSAGE, + number=515138995, + optional=True, + message='DeprecationStatus', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=316263735, + optional=True, + ) + family: str = proto.Field( + proto.STRING, + number=328751972, + optional=True, + ) + guest_os_features: MutableSequence['GuestOsFeature'] = proto.RepeatedField( + proto.MESSAGE, + number=79294545, + message='GuestOsFeature', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + image_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=379512583, + optional=True, + message='CustomerEncryptionKey', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + license_codes: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=45482664, + ) + licenses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=337642578, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + raw_disk: 'RawDisk' = proto.Field( + proto.MESSAGE, + number=503113556, + optional=True, + message='RawDisk', + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + ) + self_link: str = proto.Field( + 
proto.STRING, + number=456214797, + optional=True, + ) + shielded_instance_initial_state: 'InitialStateConfig' = proto.Field( + proto.MESSAGE, + number=192356867, + optional=True, + message='InitialStateConfig', + ) + source_disk: str = proto.Field( + proto.STRING, + number=451753793, + optional=True, + ) + source_disk_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=531501153, + optional=True, + message='CustomerEncryptionKey', + ) + source_disk_id: str = proto.Field( + proto.STRING, + number=454190809, + optional=True, + ) + source_image: str = proto.Field( + proto.STRING, + number=50443319, + optional=True, + ) + source_image_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=381503659, + optional=True, + message='CustomerEncryptionKey', + ) + source_image_id: str = proto.Field( + proto.STRING, + number=55328291, + optional=True, + ) + source_snapshot: str = proto.Field( + proto.STRING, + number=126061928, + optional=True, + ) + source_snapshot_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=303679322, + optional=True, + message='CustomerEncryptionKey', + ) + source_snapshot_id: str = proto.Field( + proto.STRING, + number=98962258, + optional=True, + ) + source_type: str = proto.Field( + proto.STRING, + number=452245726, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + storage_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=328005274, + ) + + +class ImageFamilyView(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + image (google.cloud.compute_v1.types.Image): + The latest image that is part of the + specified image family in the requested + location, and that is not deprecated. + + This field is a member of `oneof`_ ``_image``. 
+ """ + + image: 'Image' = proto.Field( + proto.MESSAGE, + number=100313435, + optional=True, + message='Image', + ) + + +class ImageList(proto.Message): + r"""Contains a list of images. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Image]): + A list of Image resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Image'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Image', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InitialStateConfig(proto.Message): + r"""Initial State for shielded instance, these are public keys + which are safe to store in public + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dbs (MutableSequence[google.cloud.compute_v1.types.FileContentBuffer]): + The Key Database (db). + dbxs (MutableSequence[google.cloud.compute_v1.types.FileContentBuffer]): + The forbidden key database (dbx). + keks (MutableSequence[google.cloud.compute_v1.types.FileContentBuffer]): + The Key Exchange Key (KEK). + pk (google.cloud.compute_v1.types.FileContentBuffer): + The Platform Key (PK). + + This field is a member of `oneof`_ ``_pk``. + """ + + dbs: MutableSequence['FileContentBuffer'] = proto.RepeatedField( + proto.MESSAGE, + number=99253, + message='FileContentBuffer', + ) + dbxs: MutableSequence['FileContentBuffer'] = proto.RepeatedField( + proto.MESSAGE, + number=3077113, + message='FileContentBuffer', + ) + keks: MutableSequence['FileContentBuffer'] = proto.RepeatedField( + proto.MESSAGE, + number=3288130, + message='FileContentBuffer', + ) + pk: 'FileContentBuffer' = proto.Field( + proto.MESSAGE, + number=3579, + optional=True, + message='FileContentBuffer', + ) + + +class InsertAddressRequest(proto.Message): + r"""A request message for Addresses.Insert. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address_resource (google.cloud.compute_v1.types.Address): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + address_resource: 'Address' = proto.Field( + proto.MESSAGE, + number=483888121, + message='Address', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertAutoscalerRequest(proto.Message): + r"""A request message for Autoscalers.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + Name of the zone for this request. + """ + + autoscaler_resource: 'Autoscaler' = proto.Field( + proto.MESSAGE, + number=207616118, + message='Autoscaler', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_bucket_resource: 'BackendBucket' = proto.Field( + proto.MESSAGE, + number=380757784, + message='BackendBucket', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + backend_service_resource: 'BackendService' = proto.Field( + proto.MESSAGE, + number=347586723, + message='BackendService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertDiskRequest(proto.Message): + r"""A request message for Disks.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + source_image (str): + Source image to restore onto a disk. This + field is optional. + + This field is a member of `oneof`_ ``_source_image``. + zone (str): + The name of the zone for this request. 
+ """ + + disk_resource: 'Disk' = proto.Field( + proto.MESSAGE, + number=25880688, + message='Disk', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_image: str = proto.Field( + proto.STRING, + number=50443319, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertExternalVpnGatewayRequest(proto.Message): + r"""A request message for ExternalVpnGateways.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + external_vpn_gateway_resource (google.cloud.compute_v1.types.ExternalVpnGateway): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + external_vpn_gateway_resource: 'ExternalVpnGateway' = proto.Field( + proto.MESSAGE, + number=486813576, + message='ExternalVpnGateway', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + parent_id (str): + Parent ID for this request. The ID can be either be + "folders/[FOLDER_ID]" if the parent is a folder or + "organizations/[ORGANIZATION_ID]" if the parent is an + organization. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall_policy_resource: 'FirewallPolicy' = proto.Field( + proto.MESSAGE, + number=495049532, + message='FirewallPolicy', + ) + parent_id: str = proto.Field( + proto.STRING, + number=459714768, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertFirewallRequest(proto.Message): + r"""A request message for Firewalls.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_resource: 'Firewall' = proto.Field( + proto.MESSAGE, + number=41425005, + message='Firewall', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertForwardingRuleRequest(proto.Message): + r"""A request message for ForwardingRules.Insert. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + forwarding_rule_resource: 'ForwardingRule' = proto.Field( + proto.MESSAGE, + number=301211695, + message='ForwardingRule', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertGlobalAddressRequest(proto.Message): + r"""A request message for GlobalAddresses.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address_resource (google.cloud.compute_v1.types.Address): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + address_resource: 'Address' = proto.Field( + proto.MESSAGE, + number=483888121, + message='Address', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertGlobalForwardingRuleRequest(proto.Message): + r"""A request message for GlobalForwardingRules.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + forwarding_rule_resource: 'ForwardingRule' = proto.Field( + proto.MESSAGE, + number=301211695, + message='ForwardingRule', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertGlobalNetworkEndpointGroupRequest(proto.Message): + r"""A request message for GlobalNetworkEndpointGroups.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + network_endpoint_group_resource: 'NetworkEndpointGroup' = proto.Field( + proto.MESSAGE, + number=525788839, + message='NetworkEndpointGroup', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertGlobalPublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for GlobalPublicDelegatedPrefixes.Insert. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix_resource: 'PublicDelegatedPrefix' = proto.Field( + proto.MESSAGE, + number=47594501, + message='PublicDelegatedPrefix', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertHealthCheckRequest(proto.Message): + r"""A request message for HealthChecks.Insert. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + health_check_resource: 'HealthCheck' = proto.Field( + proto.MESSAGE, + number=201925032, + message='HealthCheck', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertImageRequest(proto.Message): + r"""A request message for Images.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + force_create (bool): + Force image creation if true. + + This field is a member of `oneof`_ ``_force_create``. + image_resource (google.cloud.compute_v1.types.Image): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + force_create: bool = proto.Field( + proto.BOOL, + number=197723344, + optional=True, + ) + image_resource: 'Image' = proto.Field( + proto.MESSAGE, + number=371171954, + message='Image', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where you want to create + the managed instance group. + """ + + instance_group_manager_resource: 'InstanceGroupManager' = proto.Field( + proto.MESSAGE, + number=261063946, + message='InstanceGroupManager', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertInstanceGroupRequest(proto.Message): + r"""A request message for InstanceGroups.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_resource (google.cloud.compute_v1.types.InstanceGroup): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where you want to create + the instance group. + """ + + instance_group_resource: 'InstanceGroup' = proto.Field( + proto.MESSAGE, + number=286612152, + message='InstanceGroup', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertInstanceRequest(proto.Message): + r"""A request message for Instances.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_resource (google.cloud.compute_v1.types.Instance): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + source_instance_template (str): + Specifies instance template to create the + instance. This field is optional. It can be a + full or partial URL. 
For example, the following + are all valid URLs to an instance template: - + https://www.googleapis.com/compute/v1/projects/project + /global/instanceTemplates/instanceTemplate - + projects/project/global/instanceTemplates/instanceTemplate + - global/instanceTemplates/instanceTemplate + + This field is a member of `oneof`_ ``_source_instance_template``. + source_machine_image (str): + Specifies the machine image to use to create + the instance. This field is optional. It can be + a full or partial URL. For example, the + following are all valid URLs to a machine image: + - + https://www.googleapis.com/compute/v1/projects/project/global/global + /machineImages/machineImage - + projects/project/global/global/machineImages/machineImage + - global/machineImages/machineImage + + This field is a member of `oneof`_ ``_source_machine_image``. + zone (str): + The name of the zone for this request. + """ + + instance_resource: 'Instance' = proto.Field( + proto.MESSAGE, + number=215988344, + message='Instance', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_instance_template: str = proto.Field( + proto.STRING, + number=332423616, + optional=True, + ) + source_machine_image: str = proto.Field( + proto.STRING, + number=21769791, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertInstanceTemplateRequest(proto.Message): + r"""A request message for InstanceTemplates.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_template_resource (google.cloud.compute_v1.types.InstanceTemplate): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_template_resource: 'InstanceTemplate' = proto.Field( + proto.MESSAGE, + number=10679561, + message='InstanceTemplate', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertInterconnectAttachmentRequest(proto.Message): + r"""A request message for InterconnectAttachments.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + validate_only (bool): + If true, the request will not be committed. + + This field is a member of `oneof`_ ``_validate_only``. + """ + + interconnect_attachment_resource: 'InterconnectAttachment' = proto.Field( + proto.MESSAGE, + number=212341369, + message='InterconnectAttachment', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=242744629, + optional=True, + ) + + +class InsertInterconnectRequest(proto.Message): + r"""A request message for Interconnects.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_resource (google.cloud.compute_v1.types.Interconnect): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + interconnect_resource: 'Interconnect' = proto.Field( + proto.MESSAGE, + number=397611167, + message='Interconnect', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertLicenseRequest(proto.Message): + r"""A request message for Licenses.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + license_resource (google.cloud.compute_v1.types.License): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + license_resource: 'License' = proto.Field( + proto.MESSAGE, + number=437955148, + message='License', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertMachineImageRequest(proto.Message): + r"""A request message for MachineImages.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + machine_image_resource (google.cloud.compute_v1.types.MachineImage): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + source_instance (str): + Required. Source instance that is used to + create the machine image from. + + This field is a member of `oneof`_ ``_source_instance``. 
+ """ + + machine_image_resource: 'MachineImage' = proto.Field( + proto.MESSAGE, + number=60740970, + message='MachineImage', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_instance: str = proto.Field( + proto.STRING, + number=396315705, + optional=True, + ) + + +class InsertNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_attachment_resource (google.cloud.compute_v1.types.NetworkAttachment): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + request_id (str): + An optional request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. For example, consider a situation where you + make an initial request and the request times out. If you + make the request again with the same request ID, the server + can check if original operation with the same request ID was + received, and if so, will ignore the second request. This + prevents clients from accidentally creating duplicate + commitments. The request ID must be a valid UUID with the + exception that zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). end_interface: + MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + network_attachment_resource: 'NetworkAttachment' = proto.Field( + proto.MESSAGE, + number=210974745, + message='NetworkAttachment', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertNetworkEdgeSecurityServiceRequest(proto.Message): + r"""A request message for NetworkEdgeSecurityServices.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_edge_security_service_resource (google.cloud.compute_v1.types.NetworkEdgeSecurityService): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + validate_only (bool): + If true, the request will not be committed. + + This field is a member of `oneof`_ ``_validate_only``. 
+ """ + + network_edge_security_service_resource: 'NetworkEdgeSecurityService' = proto.Field( + proto.MESSAGE, + number=477548966, + message='NetworkEdgeSecurityService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=242744629, + optional=True, + ) + + +class InsertNetworkEndpointGroupRequest(proto.Message): + r"""A request message for NetworkEndpointGroups.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where you want to create + the network endpoint group. It should comply + with RFC1035. 
+ """ + + network_endpoint_group_resource: 'NetworkEndpointGroup' = proto.Field( + proto.MESSAGE, + number=525788839, + message='NetworkEndpointGroup', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy_resource: 'FirewallPolicy' = proto.Field( + proto.MESSAGE, + number=495049532, + message='FirewallPolicy', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertNetworkRequest(proto.Message): + r"""A request message for Networks.Insert. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_resource (google.cloud.compute_v1.types.Network): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + network_resource: 'Network' = proto.Field( + proto.MESSAGE, + number=122105599, + message='Network', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + initial_node_count (int): + Initial count of nodes in the node group. + node_group_resource (google.cloud.compute_v1.types.NodeGroup): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + initial_node_count: int = proto.Field( + proto.INT32, + number=71951469, + ) + node_group_resource: 'NodeGroup' = proto.Field( + proto.MESSAGE, + number=505321899, + message='NodeGroup', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertNodeTemplateRequest(proto.Message): + r"""A request message for NodeTemplates.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_template_resource (google.cloud.compute_v1.types.NodeTemplate): + The body resource for this request + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + node_template_resource: 'NodeTemplate' = proto.Field( + proto.MESSAGE, + number=127364406, + message='NodeTemplate', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertPacketMirroringRequest(proto.Message): + r"""A request message for PacketMirrorings.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + packet_mirroring_resource (google.cloud.compute_v1.types.PacketMirroring): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + packet_mirroring_resource: 'PacketMirroring' = proto.Field( + proto.MESSAGE, + number=493501985, + message='PacketMirroring', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertPublicAdvertisedPrefixeRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_advertised_prefix_resource (google.cloud.compute_v1.types.PublicAdvertisedPrefix): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_advertised_prefix_resource: 'PublicAdvertisedPrefix' = proto.Field( + proto.MESSAGE, + number=233614223, + message='PublicAdvertisedPrefix', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertPublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + region (str): + Name of the region of this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix_resource: 'PublicDelegatedPrefix' = proto.Field( + proto.MESSAGE, + number=47594501, + message='PublicDelegatedPrefix', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionAutoscalerRequest(proto.Message): + r"""A request message for RegionAutoscalers.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + autoscaler_resource: 'Autoscaler' = proto.Field( + proto.MESSAGE, + number=207616118, + message='Autoscaler', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + backend_service_resource: 'BackendService' = proto.Field( + proto.MESSAGE, + number=347586723, + message='BackendService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionCommitmentRequest(proto.Message): + r"""A request message for RegionCommitments.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + commitment_resource (google.cloud.compute_v1.types.Commitment): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + commitment_resource: 'Commitment' = proto.Field( + proto.MESSAGE, + number=244240888, + message='Commitment', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + source_image (str): + Source image to restore onto a disk. This + field is optional. + + This field is a member of `oneof`_ ``_source_image``. 
+ """ + + disk_resource: 'Disk' = proto.Field( + proto.MESSAGE, + number=25880688, + message='Disk', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_image: str = proto.Field( + proto.STRING, + number=50443319, + optional=True, + ) + + +class InsertRegionHealthCheckRequest(proto.Message): + r"""A request message for RegionHealthChecks.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + health_check_resource: 'HealthCheck' = proto.Field( + proto.MESSAGE, + number=201925032, + message='HealthCheck', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionHealthCheckServiceRequest(proto.Message): + r"""A request message for RegionHealthCheckServices.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check_service_resource (google.cloud.compute_v1.types.HealthCheckService): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + health_check_service_resource: 'HealthCheckService' = proto.Field( + proto.MESSAGE, + number=477367794, + message='HealthCheckService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + instance_group_manager_resource: 'InstanceGroupManager' = proto.Field( + proto.MESSAGE, + number=261063946, + message='InstanceGroupManager', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionInstanceTemplateRequest(proto.Message): + r"""A request message for RegionInstanceTemplates.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_template_resource (google.cloud.compute_v1.types.InstanceTemplate): + The body resource for this request + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + instance_template_resource: 'InstanceTemplate' = proto.Field( + proto.MESSAGE, + number=10679561, + message='InstanceTemplate', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionNetworkEndpointGroupRequest(proto.Message): + r"""A request message for RegionNetworkEndpointGroups.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_group_resource (google.cloud.compute_v1.types.NetworkEndpointGroup): + The body resource for this request + project (str): + Project ID for this request. + region (str): + The name of the region where you want to + create the network endpoint group. It should + comply with RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + network_endpoint_group_resource: 'NetworkEndpointGroup' = proto.Field( + proto.MESSAGE, + number=525788839, + message='NetworkEndpointGroup', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.Insert. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall_policy_resource: 'FirewallPolicy' = proto.Field( + proto.MESSAGE, + number=495049532, + message='FirewallPolicy', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionNotificationEndpointRequest(proto.Message): + r"""A request message for RegionNotificationEndpoints.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + notification_endpoint_resource (google.cloud.compute_v1.types.NotificationEndpoint): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + notification_endpoint_resource: 'NotificationEndpoint' = proto.Field( + proto.MESSAGE, + number=338459940, + message='NotificationEndpoint', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class InsertRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + validate_only (bool): + If true, the request will not be committed. + + This field is a member of `oneof`_ ``_validate_only``. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy_resource: 'SecurityPolicy' = proto.Field( + proto.MESSAGE, + number=216159612, + message='SecurityPolicy', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=242744629, + optional=True, + ) + + +class InsertRegionSslCertificateRequest(proto.Message): + r"""A request message for RegionSslCertificates.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_certificate_resource: 'SslCertificate' = proto.Field( + proto.MESSAGE, + number=180709897, + message='SslCertificate', + ) + + +class InsertRegionSslPolicyRequest(proto.Message): + r"""A request message for RegionSslPolicies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy_resource: 'SslPolicy' = proto.Field( + proto.MESSAGE, + number=274891848, + message='SslPolicy', + ) + + +class InsertRegionTargetHttpProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpProxies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_http_proxy_resource: 'TargetHttpProxy' = proto.Field( + proto.MESSAGE, + number=24696744, + message='TargetHttpProxy', + ) + + +class InsertRegionTargetHttpsProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpsProxies.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy_resource: 'TargetHttpsProxy' = proto.Field( + proto.MESSAGE, + number=433657473, + message='TargetHttpsProxy', + ) + + +class InsertRegionTargetTcpProxyRequest(proto.Message): + r"""A request message for RegionTargetTcpProxies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_tcp_proxy_resource (google.cloud.compute_v1.types.TargetTcpProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_tcp_proxy_resource: 'TargetTcpProxy' = proto.Field( + proto.MESSAGE, + number=145913931, + message='TargetTcpProxy', + ) + + +class InsertRegionUrlMapRequest(proto.Message): + r"""A request message for RegionUrlMaps.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + begin_interface: MixerMutationRequestBuilder Request ID to + support idempotency. + + This field is a member of `oneof`_ ``_request_id``. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map_resource: 'UrlMap' = proto.Field( + proto.MESSAGE, + number=168675425, + message='UrlMap', + ) + + +class InsertReservationRequest(proto.Message): + r"""A request message for Reservations.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + reservation_resource (google.cloud.compute_v1.types.Reservation): + The body resource for this request + zone (str): + Name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + reservation_resource: 'Reservation' = proto.Field( + proto.MESSAGE, + number=285030177, + message='Reservation', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource_policy_resource (google.cloud.compute_v1.types.ResourcePolicy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource_policy_resource: 'ResourcePolicy' = proto.Field( + proto.MESSAGE, + number=76826186, + message='ResourcePolicy', + ) + + +class InsertRouteRequest(proto.Message): + r"""A request message for Routes.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ route_resource (google.cloud.compute_v1.types.Route): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + route_resource: 'Route' = proto.Field( + proto.MESSAGE, + number=225428804, + message='Route', + ) + + +class InsertRouterRequest(proto.Message): + r"""A request message for Routers.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + router_resource: 'Router' = proto.Field( + proto.MESSAGE, + number=155222084, + message='Router', + ) + + +class InsertSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + validate_only (bool): + If true, the request will not be committed. + + This field is a member of `oneof`_ ``_validate_only``. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy_resource: 'SecurityPolicy' = proto.Field( + proto.MESSAGE, + number=216159612, + message='SecurityPolicy', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=242744629, + optional=True, + ) + + +class InsertServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ service_attachment_resource (google.cloud.compute_v1.types.ServiceAttachment): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + service_attachment_resource: 'ServiceAttachment' = proto.Field( + proto.MESSAGE, + number=472980256, + message='ServiceAttachment', + ) + + +class InsertSnapshotRequest(proto.Message): + r"""A request message for Snapshots.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ snapshot_resource (google.cloud.compute_v1.types.Snapshot): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + snapshot_resource: 'Snapshot' = proto.Field( + proto.MESSAGE, + number=481319977, + message='Snapshot', + ) + + +class InsertSslCertificateRequest(proto.Message): + r"""A request message for SslCertificates.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_certificate_resource: 'SslCertificate' = proto.Field( + proto.MESSAGE, + number=180709897, + message='SslCertificate', + ) + + +class InsertSslPolicyRequest(proto.Message): + r"""A request message for SslPolicies.Insert. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy_resource: 'SslPolicy' = proto.Field( + proto.MESSAGE, + number=274891848, + message='SslPolicy', + ) + + +class InsertSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + subnetwork_resource: 'Subnetwork' = proto.Field( + proto.MESSAGE, + number=42233151, + message='Subnetwork', + ) + + +class InsertTargetGrpcProxyRequest(proto.Message): + r"""A request message for TargetGrpcProxies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_grpc_proxy_resource: 'TargetGrpcProxy' = proto.Field( + proto.MESSAGE, + number=328922450, + message='TargetGrpcProxy', + ) + + +class InsertTargetHttpProxyRequest(proto.Message): + r"""A request message for TargetHttpProxies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_http_proxy_resource: 'TargetHttpProxy' = proto.Field( + proto.MESSAGE, + number=24696744, + message='TargetHttpProxy', + ) + + +class InsertTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy_resource: 'TargetHttpsProxy' = proto.Field( + proto.MESSAGE, + number=433657473, + message='TargetHttpsProxy', + ) + + +class InsertTargetInstanceRequest(proto.Message): + r"""A request message for TargetInstances.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_instance_resource (google.cloud.compute_v1.types.TargetInstance): + The body resource for this request + zone (str): + Name of the zone scoping this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_instance_resource: 'TargetInstance' = proto.Field( + proto.MESSAGE, + number=430453066, + message='TargetInstance', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_pool_resource (google.cloud.compute_v1.types.TargetPool): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool_resource: 'TargetPool' = proto.Field( + proto.MESSAGE, + number=101281443, + message='TargetPool', + ) + + +class InsertTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_ssl_proxy_resource (google.cloud.compute_v1.types.TargetSslProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_ssl_proxy_resource: 'TargetSslProxy' = proto.Field( + proto.MESSAGE, + number=142016192, + message='TargetSslProxy', + ) + + +class InsertTargetTcpProxyRequest(proto.Message): + r"""A request message for TargetTcpProxies.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_tcp_proxy_resource (google.cloud.compute_v1.types.TargetTcpProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_tcp_proxy_resource: 'TargetTcpProxy' = proto.Field( + proto.MESSAGE, + number=145913931, + message='TargetTcpProxy', + ) + + +class InsertTargetVpnGatewayRequest(proto.Message): + r"""A request message for TargetVpnGateways.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_vpn_gateway_resource (google.cloud.compute_v1.types.TargetVpnGateway): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_vpn_gateway_resource: 'TargetVpnGateway' = proto.Field( + proto.MESSAGE, + number=498050, + message='TargetVpnGateway', + ) + + +class InsertUrlMapRequest(proto.Message): + r"""A request message for UrlMaps.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map_resource: 'UrlMap' = proto.Field( + proto.MESSAGE, + number=168675425, + message='UrlMap', + ) + + +class InsertVpnGatewayRequest(proto.Message): + r"""A request message for VpnGateways.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ vpn_gateway_resource (google.cloud.compute_v1.types.VpnGateway): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + vpn_gateway_resource: 'VpnGateway' = proto.Field( + proto.MESSAGE, + number=182688660, + message='VpnGateway', + ) + + +class InsertVpnTunnelRequest(proto.Message): + r"""A request message for VpnTunnels.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ vpn_tunnel_resource (google.cloud.compute_v1.types.VpnTunnel): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + vpn_tunnel_resource: 'VpnTunnel' = proto.Field( + proto.MESSAGE, + number=86839482, + message='VpnTunnel', + ) + + +class Instance(proto.Message): + r"""Represents an Instance resource. An instance is a virtual + machine that is hosted on Google Cloud Platform. For more + information, read Virtual Machine Instances. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + advanced_machine_features (google.cloud.compute_v1.types.AdvancedMachineFeatures): + Controls for advanced machine-related + behavior features. + + This field is a member of `oneof`_ ``_advanced_machine_features``. + can_ip_forward (bool): + Allows this instance to send and receive + packets with non-matching destination or source + IPs. This is required if you plan to use this + instance to forward routes. For more + information, see Enabling IP Forwarding . + + This field is a member of `oneof`_ ``_can_ip_forward``. + confidential_instance_config (google.cloud.compute_v1.types.ConfidentialInstanceConfig): + + This field is a member of `oneof`_ ``_confidential_instance_config``. + cpu_platform (str): + [Output Only] The CPU platform used by this instance. + + This field is a member of `oneof`_ ``_cpu_platform``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + deletion_protection (bool): + Whether the resource should be protected + against deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. + description (str): + An optional description of this resource. 
+ Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + disks (MutableSequence[google.cloud.compute_v1.types.AttachedDisk]): + Array of disks associated with this instance. + Persistent disks must be created before you can + assign them. + display_device (google.cloud.compute_v1.types.DisplayDevice): + Enables display device for the instance. + + This field is a member of `oneof`_ ``_display_device``. + fingerprint (str): + Specifies a fingerprint for this resource, + which is essentially a hash of the instance's + contents and used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update the instance. You must always provide + an up-to-date fingerprint hash in order to + update the instance. To see the latest + fingerprint, make get() request to the instance. + + This field is a member of `oneof`_ ``_fingerprint``. + guest_accelerators (MutableSequence[google.cloud.compute_v1.types.AcceleratorConfig]): + A list of the type and count of accelerator + cards attached to the instance. + hostname (str): + Specifies the hostname of the instance. The specified + hostname must be RFC1035 compliant. If hostname is not + specified, the default hostname is + [INSTANCE_NAME].c.[PROJECT_ID].internal when using the + global DNS, and + [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using + zonal DNS. + + This field is a member of `oneof`_ ``_hostname``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + instance_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + Encrypts suspended data for an instance with + a customer-managed encryption key. If you are + creating a new instance, this field will encrypt + the local SSD and in-memory contents of the + instance during the suspend operation. 
If you do + not provide an encryption key when creating the + instance, then the local SSD and in-memory + contents will be encrypted using an + automatically generated key during the suspend + operation. + + This field is a member of `oneof`_ ``_instance_encryption_key``. + key_revocation_action_type (str): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + Check the KeyRevocationActionType enum for the + list of possible values. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + kind (str): + [Output Only] Type of the resource. Always compute#instance + for instances. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for this request, which is + essentially a hash of the label's contents and + used for optimistic locking. The fingerprint is + initially generated by Compute Engine and + changes after every request to modify or update + labels. You must always provide an up-to-date + fingerprint hash in order to update or change + labels. To see the latest fingerprint, make + get() request to the instance. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels to apply to this instance. These can + be later modified by the setLabels method. + last_start_timestamp (str): + [Output Only] Last start timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_start_timestamp``. + last_stop_timestamp (str): + [Output Only] Last stop timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_stop_timestamp``. + last_suspended_timestamp (str): + [Output Only] Last suspended timestamp in RFC3339 text + format. + + This field is a member of `oneof`_ ``_last_suspended_timestamp``. 
+ machine_type (str): + Full or partial URL of the machine type + resource to use for this instance, in the + format: zones/zone/machineTypes/machine-type. + This is provided by the client when the instance + is created. For example, the following is a + valid partial url to a predefined machine type: + zones/us-central1-f/machineTypes/n1-standard-1 + To create a custom machine type, provide a URL + to a machine type in the following format, where + CPUS is 1 or an even number up to 32 (2, 4, 6, + ... 24, etc), and MEMORY is the total memory for + this instance. Memory must be a multiple of 256 + MB and must be supplied in MB (e.g. 5 GB of + memory is 5120 MB): + zones/zone/machineTypes/custom-CPUS-MEMORY For + example: + zones/us-central1-f/machineTypes/custom-4-5120 + For a full list of restrictions, read the + Specifications for custom machine types. + + This field is a member of `oneof`_ ``_machine_type``. + metadata (google.cloud.compute_v1.types.Metadata): + The metadata key/value pairs assigned to this + instance. This includes custom metadata and + predefined keys. + + This field is a member of `oneof`_ ``_metadata``. + min_cpu_platform (str): + Specifies a minimum CPU platform for the VM + instance. Applicable values are the friendly + names of CPU platforms, such as minCpuPlatform: + "Intel Haswell" or minCpuPlatform: "Intel Sandy + Bridge". + + This field is a member of `oneof`_ ``_min_cpu_platform``. + name (str): + The name of the resource, provided by the client when + initially creating the resource. The resource name must be + 1-63 characters long, and comply with RFC1035. Specifically, + the name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. 
+ network_interfaces (MutableSequence[google.cloud.compute_v1.types.NetworkInterface]): + An array of network configurations for this + instance. These specify how interfaces are + configured to interact with other network + services, such as connecting to the internet. + Multiple interfaces are supported per instance. + network_performance_config (google.cloud.compute_v1.types.NetworkPerformanceConfig): + + This field is a member of `oneof`_ ``_network_performance_config``. + params (google.cloud.compute_v1.types.InstanceParams): + Input only. [Input Only] Additional params passed with the + request, but not persisted as part of resource payload. + + This field is a member of `oneof`_ ``_params``. + private_ipv6_google_access (str): + The private IPv6 google access type for the VM. If not + specified, use INHERIT_FROM_SUBNETWORK as default. Check the + PrivateIpv6GoogleAccess enum for the list of possible + values. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. + reservation_affinity (google.cloud.compute_v1.types.ReservationAffinity): + Specifies the reservations that this instance + can consume from. + + This field is a member of `oneof`_ ``_reservation_affinity``. + resource_policies (MutableSequence[str]): + Resource policies applied to this instance. + resource_status (google.cloud.compute_v1.types.ResourceStatus): + [Output Only] Specifies values set for instance attributes + as compared to the values requested by user in the + corresponding input only field. + + This field is a member of `oneof`_ ``_resource_status``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + scheduling (google.cloud.compute_v1.types.Scheduling): + Sets the scheduling options for this + instance. + + This field is a member of `oneof`_ ``_scheduling``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ service_accounts (MutableSequence[google.cloud.compute_v1.types.ServiceAccount]): + A list of service accounts, with their + specified scopes, authorized for this instance. + Only one service account per VM instance is + supported. Service accounts generate access + tokens that can be accessed through the metadata + server and used to authenticate applications on + the instance. See Service Accounts for more + information. + shielded_instance_config (google.cloud.compute_v1.types.ShieldedInstanceConfig): + + This field is a member of `oneof`_ ``_shielded_instance_config``. + shielded_instance_integrity_policy (google.cloud.compute_v1.types.ShieldedInstanceIntegrityPolicy): + + This field is a member of `oneof`_ ``_shielded_instance_integrity_policy``. + source_machine_image (str): + Source machine image + + This field is a member of `oneof`_ ``_source_machine_image``. + source_machine_image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + Source machine image encryption key when + creating an instance from a machine image. + + This field is a member of `oneof`_ ``_source_machine_image_encryption_key``. + start_restricted (bool): + [Output Only] Whether a VM has been restricted for start + because Compute Engine has detected suspicious activity. + + This field is a member of `oneof`_ ``_start_restricted``. + status (str): + [Output Only] The status of the instance. One of the + following values: PROVISIONING, STAGING, RUNNING, STOPPING, + SUSPENDING, SUSPENDED, REPAIRING, and TERMINATED. For more + information about the status of the instance, see Instance + life cycle. Check the Status enum for the list of possible + values. + + This field is a member of `oneof`_ ``_status``. + status_message (str): + [Output Only] An optional, human-readable explanation of the + status. + + This field is a member of `oneof`_ ``_status_message``. + tags (google.cloud.compute_v1.types.Tags): + Tags to apply to this instance. 
Tags are used + to identify valid sources or targets for network + firewalls and are specified by the client during + instance creation. The tags can be later + modified by the setTags method. Each tag within + the list must comply with RFC1035. Multiple tags + can be specified via the 'tags.items' field. + + This field is a member of `oneof`_ ``_tags``. + zone (str): + [Output Only] URL of the zone where the instance resides. + You must specify this field as part of the HTTP request URL. + It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. + """ + class KeyRevocationActionType(proto.Enum): + r"""KeyRevocationActionType of the instance. Supported options + are "STOP" and "NONE". The default value is "NONE" if it is not + specified. + + Values: + UNDEFINED_KEY_REVOCATION_ACTION_TYPE (0): + A value indicating that the enum field is not + set. + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED (467110106): + Default value. This value is unused. + NONE (2402104): + Indicates user chose no operation. + STOP (2555906): + Indicates user chose to opt for VM shutdown + on key revocation. + """ + UNDEFINED_KEY_REVOCATION_ACTION_TYPE = 0 + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 467110106 + NONE = 2402104 + STOP = 2555906 + + class PrivateIpv6GoogleAccess(proto.Enum): + r"""The private IPv6 google access type for the VM. If not specified, + use INHERIT_FROM_SUBNETWORK as default. + + Values: + UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS (0): + A value indicating that the enum field is not + set. + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (427975994): + Bidirectional private IPv6 access to/from + Google services. If specified, the subnetwork + who is attached to the instance's default + network interface will be assigned an internal + IPv6 prefix if it doesn't have before. + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (288210263): + Outbound private IPv6 access from VMs in this + subnet to Google services. 
If specified, the + subnetwork who is attached to the instance's + default network interface will be assigned an + internal IPv6 prefix if it doesn't have before. + INHERIT_FROM_SUBNETWORK (530256959): + Each network interface inherits + PrivateIpv6GoogleAccess from its subnetwork. + """ + UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS = 0 + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 427975994 + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 288210263 + INHERIT_FROM_SUBNETWORK = 530256959 + + class Status(proto.Enum): + r"""[Output Only] The status of the instance. One of the following + values: PROVISIONING, STAGING, RUNNING, STOPPING, SUSPENDING, + SUSPENDED, REPAIRING, and TERMINATED. For more information about the + status of the instance, see Instance life cycle. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + DEPROVISIONING (428935662): + The instance is halted and we are performing + tear down tasks like network deprogramming, + releasing quota, IP, tearing down disks etc. + PROVISIONING (290896621): + Resources are being allocated for the + instance. + REPAIRING (413483285): + The instance is in repair. + RUNNING (121282975): + The instance is running. + STAGING (431072283): + All required resources have been allocated + and the instance is being started. + STOPPED (444276141): + The instance has stopped successfully. + STOPPING (350791796): + The instance is currently stopping (either + being deleted or killed). + SUSPENDED (51223995): + The instance has suspended. + SUSPENDING (514206246): + The instance is suspending. + TERMINATED (250018339): + The instance has stopped (either by explicit + action or underlying failure). 
+ """ + UNDEFINED_STATUS = 0 + DEPROVISIONING = 428935662 + PROVISIONING = 290896621 + REPAIRING = 413483285 + RUNNING = 121282975 + STAGING = 431072283 + STOPPED = 444276141 + STOPPING = 350791796 + SUSPENDED = 51223995 + SUSPENDING = 514206246 + TERMINATED = 250018339 + + advanced_machine_features: 'AdvancedMachineFeatures' = proto.Field( + proto.MESSAGE, + number=409646002, + optional=True, + message='AdvancedMachineFeatures', + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=467731324, + optional=True, + ) + confidential_instance_config: 'ConfidentialInstanceConfig' = proto.Field( + proto.MESSAGE, + number=490637685, + optional=True, + message='ConfidentialInstanceConfig', + ) + cpu_platform: str = proto.Field( + proto.STRING, + number=410285354, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + deletion_protection: bool = proto.Field( + proto.BOOL, + number=458014698, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disks: MutableSequence['AttachedDisk'] = proto.RepeatedField( + proto.MESSAGE, + number=95594102, + message='AttachedDisk', + ) + display_device: 'DisplayDevice' = proto.Field( + proto.MESSAGE, + number=258933875, + optional=True, + message='DisplayDevice', + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + guest_accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=463595119, + message='AcceleratorConfig', + ) + hostname: str = proto.Field( + proto.STRING, + number=237067315, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + instance_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=64741517, + optional=True, + message='CustomerEncryptionKey', + ) + key_revocation_action_type: str = proto.Field( + proto.STRING, + number=235941474, 
+ optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + last_start_timestamp: str = proto.Field( + proto.STRING, + number=443830736, + optional=True, + ) + last_stop_timestamp: str = proto.Field( + proto.STRING, + number=412823010, + optional=True, + ) + last_suspended_timestamp: str = proto.Field( + proto.STRING, + number=356275337, + optional=True, + ) + machine_type: str = proto.Field( + proto.STRING, + number=227711026, + optional=True, + ) + metadata: 'Metadata' = proto.Field( + proto.MESSAGE, + number=86866735, + optional=True, + message='Metadata', + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=242912759, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network_interfaces: MutableSequence['NetworkInterface'] = proto.RepeatedField( + proto.MESSAGE, + number=52735243, + message='NetworkInterface', + ) + network_performance_config: 'NetworkPerformanceConfig' = proto.Field( + proto.MESSAGE, + number=398330850, + optional=True, + message='NetworkPerformanceConfig', + ) + params: 'InstanceParams' = proto.Field( + proto.MESSAGE, + number=78313862, + optional=True, + message='InstanceParams', + ) + private_ipv6_google_access: str = proto.Field( + proto.STRING, + number=48277006, + optional=True, + ) + reservation_affinity: 'ReservationAffinity' = proto.Field( + proto.MESSAGE, + number=157850683, + optional=True, + message='ReservationAffinity', + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + resource_status: 'ResourceStatus' = proto.Field( + proto.MESSAGE, + number=249429315, + optional=True, + message='ResourceStatus', + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + 
number=480964267, + optional=True, + ) + scheduling: 'Scheduling' = proto.Field( + proto.MESSAGE, + number=386688404, + optional=True, + message='Scheduling', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + service_accounts: MutableSequence['ServiceAccount'] = proto.RepeatedField( + proto.MESSAGE, + number=277537328, + message='ServiceAccount', + ) + shielded_instance_config: 'ShieldedInstanceConfig' = proto.Field( + proto.MESSAGE, + number=12862901, + optional=True, + message='ShieldedInstanceConfig', + ) + shielded_instance_integrity_policy: 'ShieldedInstanceIntegrityPolicy' = proto.Field( + proto.MESSAGE, + number=163696919, + optional=True, + message='ShieldedInstanceIntegrityPolicy', + ) + source_machine_image: str = proto.Field( + proto.STRING, + number=21769791, + optional=True, + ) + source_machine_image_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=192839075, + optional=True, + message='CustomerEncryptionKey', + ) + start_restricted: bool = proto.Field( + proto.BOOL, + number=123693144, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + status_message: str = proto.Field( + proto.STRING, + number=297428154, + optional=True, + ) + tags: 'Tags' = proto.Field( + proto.MESSAGE, + number=3552281, + optional=True, + message='Tags', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class InstanceAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.InstancesScopedList]): + An object that contains a list of instances + scoped by zone. + kind (str): + [Output Only] Type of resource. 
Always + compute#instanceAggregatedList for aggregated lists of + Instance resources. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'InstancesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='InstancesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceConsumptionData(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consumption_info (google.cloud.compute_v1.types.InstanceConsumptionInfo): + Resources consumed by the instance. 
+ + This field is a member of `oneof`_ ``_consumption_info``. + instance (str): + Server-defined URL for the instance. + + This field is a member of `oneof`_ ``_instance``. + """ + + consumption_info: 'InstanceConsumptionInfo' = proto.Field( + proto.MESSAGE, + number=146354898, + optional=True, + message='InstanceConsumptionInfo', + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + optional=True, + ) + + +class InstanceConsumptionInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + guest_cpus (int): + The number of virtual CPUs that are available + to the instance. + + This field is a member of `oneof`_ ``_guest_cpus``. + local_ssd_gb (int): + The amount of local SSD storage available to + the instance, defined in GiB. + + This field is a member of `oneof`_ ``_local_ssd_gb``. + memory_mb (int): + The amount of physical memory available to + the instance, defined in MiB. + + This field is a member of `oneof`_ ``_memory_mb``. + min_node_cpus (int): + The minimal guaranteed number of virtual CPUs + that are reserved. + + This field is a member of `oneof`_ ``_min_node_cpus``. + """ + + guest_cpus: int = proto.Field( + proto.INT32, + number=393356754, + optional=True, + ) + local_ssd_gb: int = proto.Field( + proto.INT32, + number=329237578, + optional=True, + ) + memory_mb: int = proto.Field( + proto.INT32, + number=116001171, + optional=True, + ) + min_node_cpus: int = proto.Field( + proto.INT32, + number=317231675, + optional=True, + ) + + +class InstanceGroup(proto.Message): + r"""Represents an Instance Group resource. Instance Groups can be + used to configure a target for load balancing. Instance groups + can either be managed or unmanaged. To create managed instance + groups, use the instanceGroupManager or + regionInstanceGroupManager resource instead. 
Use zonal unmanaged + instance groups if you need to apply load balancing to groups of + heterogeneous instances or if you need to manage the instances + yourself. You cannot create regional unmanaged instance groups. + For more information, read Instance groups. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] The creation timestamp for this instance group + in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + [Output Only] The fingerprint of the named ports. The system + uses this fingerprint to detect conflicts when multiple + users change the named ports concurrently. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] A unique identifier for this instance group, + generated by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] The resource type, which is always + compute#instanceGroup for instance groups. + + This field is a member of `oneof`_ ``_kind``. + name (str): + The name of the instance group. The name must + be 1-63 characters long, and comply with + RFC1035. + + This field is a member of `oneof`_ ``_name``. + named_ports (MutableSequence[google.cloud.compute_v1.types.NamedPort]): + Assigns a name to a port number. For example: {name: "http", + port: 80} This allows the system to reference ports by the + assigned name instead of a port number. Named ports can also + contain multiple ports. For example: [{name: "app1", port: + 8080}, {name: "app1", port: 8081}, {name: "app2", port: + 8082}] Named ports apply to all instances in this instance + group. 
+ network (str): + [Output Only] The URL of the network to which all instances + in the instance group belong. If your instance has multiple + network interfaces, then the network and subnetwork fields + only refer to the network and subnet used by your primary + interface (nic0). + + This field is a member of `oneof`_ ``_network``. + region (str): + [Output Only] The URL of the region where the instance group + is located (for regional resources). + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] The URL for this instance group. The server + generates this URL. + + This field is a member of `oneof`_ ``_self_link``. + size (int): + [Output Only] The total number of instances in the instance + group. + + This field is a member of `oneof`_ ``_size``. + subnetwork (str): + [Output Only] The URL of the subnetwork to which all + instances in the instance group belong. If your instance has + multiple network interfaces, then the network and subnetwork + fields only refer to the network and subnet used by your + primary interface (nic0). + + This field is a member of `oneof`_ ``_subnetwork``. + zone (str): + [Output Only] The URL of the zone where the instance group + is located (for zonal resources). + + This field is a member of `oneof`_ ``_zone``. 
+ """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + named_ports: MutableSequence['NamedPort'] = proto.RepeatedField( + proto.MESSAGE, + number=427598732, + message='NamedPort', + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + size: int = proto.Field( + proto.INT32, + number=3530753, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class InstanceGroupAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.InstanceGroupsScopedList]): + A list of InstanceGroupsScopedList resources. + kind (str): + [Output Only] The resource type, which is always + compute#instanceGroupAggregatedList for aggregated lists of + instance groups. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'InstanceGroupsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='InstanceGroupsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceGroupList(proto.Message): + r"""A list of InstanceGroup resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceGroup]): + A list of InstanceGroup resources. 
+ kind (str): + [Output Only] The resource type, which is always + compute#instanceGroupList for instance group lists. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InstanceGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceGroup', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceGroupManager(proto.Message): + r"""Represents a Managed Instance Group resource. An instance + group is a collection of VM instances that you can manage as a + single entity. For more information, read Instance groups. For + zonal Managed Instance Group, use the instanceGroupManagers + resource. For regional Managed Instance Group, use the + regionInstanceGroupManagers resource. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_healing_policies (MutableSequence[google.cloud.compute_v1.types.InstanceGroupManagerAutoHealingPolicy]): + The autohealing policy for this managed + instance group. You can specify only one value. + base_instance_name (str): + The base instance name to use for instances + in this group. The value must be 1-58 characters + long. Instances are named by appending a hyphen + and a random four-character string to the base + instance name. The base instance name must + comply with RFC1035. + + This field is a member of `oneof`_ ``_base_instance_name``. + creation_timestamp (str): + [Output Only] The creation timestamp for this managed + instance group in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + current_actions (google.cloud.compute_v1.types.InstanceGroupManagerActionsSummary): + [Output Only] The list of instance actions and the number of + instances in this managed instance group that are scheduled + for each of those actions. + + This field is a member of `oneof`_ ``_current_actions``. + description (str): + An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. + distribution_policy (google.cloud.compute_v1.types.DistributionPolicy): + Policy specifying the intended distribution + of managed instances across zones in a regional + managed instance group. + + This field is a member of `oneof`_ ``_distribution_policy``. + fingerprint (str): + Fingerprint of this resource. This field may + be used in optimistic locking. It will be + ignored when inserting an InstanceGroupManager. + An up-to-date fingerprint must be provided in + order to update the InstanceGroupManager, + otherwise the request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve an + InstanceGroupManager. 
+ + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] A unique identifier for this resource type. + The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. + instance_group (str): + [Output Only] The URL of the Instance Group resource. + + This field is a member of `oneof`_ ``_instance_group``. + instance_lifecycle_policy (google.cloud.compute_v1.types.InstanceGroupManagerInstanceLifecyclePolicy): + The repair policy for this managed instance + group. + + This field is a member of `oneof`_ ``_instance_lifecycle_policy``. + instance_template (str): + The URL of the instance template that is + specified for this managed instance group. The + group uses this template to create all new + instances in the managed instance group. The + templates for existing instances in the group do + not change unless you run recreateInstances, run + applyUpdatesToInstances, or set the group's + updatePolicy.type to PROACTIVE. + + This field is a member of `oneof`_ ``_instance_template``. + kind (str): + [Output Only] The resource type, which is always + compute#instanceGroupManager for managed instance groups. + + This field is a member of `oneof`_ ``_kind``. + list_managed_instances_results (str): + Pagination behavior of the + listManagedInstances API method for this managed + instance group. Check the + ListManagedInstancesResults enum for the list of + possible values. + + This field is a member of `oneof`_ ``_list_managed_instances_results``. + name (str): + The name of the managed instance group. The + name must be 1-63 characters long, and comply + with RFC1035. + + This field is a member of `oneof`_ ``_name``. + named_ports (MutableSequence[google.cloud.compute_v1.types.NamedPort]): + Named ports configured for the Instance + Groups complementary to this Instance Group + Manager. + region (str): + [Output Only] The URL of the region where the managed + instance group resides (for regional resources). 
+ + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] The URL for this managed instance group. The + server defines this URL. + + This field is a member of `oneof`_ ``_self_link``. + stateful_policy (google.cloud.compute_v1.types.StatefulPolicy): + Stateful configuration for this Instanced + Group Manager + + This field is a member of `oneof`_ ``_stateful_policy``. + status (google.cloud.compute_v1.types.InstanceGroupManagerStatus): + [Output Only] The status of this managed instance group. + + This field is a member of `oneof`_ ``_status``. + target_pools (MutableSequence[str]): + The URLs for all TargetPool resources to + which instances in the instanceGroup field are + added. The target pools automatically apply to + all of the instances in the managed instance + group. + target_size (int): + The target number of running instances for + this managed instance group. You can reduce this + number by using the instanceGroupManager + deleteInstances or abandonInstances methods. + Resizing the group also changes this number. + + This field is a member of `oneof`_ ``_target_size``. + update_policy (google.cloud.compute_v1.types.InstanceGroupManagerUpdatePolicy): + The update policy for this managed instance + group. + + This field is a member of `oneof`_ ``_update_policy``. + versions (MutableSequence[google.cloud.compute_v1.types.InstanceGroupManagerVersion]): + Specifies the instance templates used by this + managed instance group to create instances. Each + version is defined by an instanceTemplate and a + name. Every version can appear at most once per + instance group. This field overrides the + top-level instanceTemplate field. Read more + about the relationships between these fields. + Exactly one version must leave the targetSize + field unset. That version will be applied to all + remaining instances. For more information, read + about canary updates. 
+ zone (str): + [Output Only] The URL of a zone where the managed instance + group is located (for zonal resources). + + This field is a member of `oneof`_ ``_zone``. + """ + class ListManagedInstancesResults(proto.Enum): + r"""Pagination behavior of the listManagedInstances API method + for this managed instance group. + + Values: + UNDEFINED_LIST_MANAGED_INSTANCES_RESULTS (0): + A value indicating that the enum field is not + set. + PAGELESS (32183464): + (Default) Pagination is disabled for the + group's listManagedInstances API method. + maxResults and pageToken query parameters are + ignored and all instances are returned in a + single response. + PAGINATED (40190637): + Pagination is enabled for the group's + listManagedInstances API method. maxResults and + pageToken query parameters are respected. + """ + UNDEFINED_LIST_MANAGED_INSTANCES_RESULTS = 0 + PAGELESS = 32183464 + PAGINATED = 40190637 + + auto_healing_policies: MutableSequence['InstanceGroupManagerAutoHealingPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=456799109, + message='InstanceGroupManagerAutoHealingPolicy', + ) + base_instance_name: str = proto.Field( + proto.STRING, + number=389106439, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + current_actions: 'InstanceGroupManagerActionsSummary' = proto.Field( + proto.MESSAGE, + number=164045879, + optional=True, + message='InstanceGroupManagerActionsSummary', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + distribution_policy: 'DistributionPolicy' = proto.Field( + proto.MESSAGE, + number=534558541, + optional=True, + message='DistributionPolicy', + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + optional=True, + ) + 
instance_lifecycle_policy: 'InstanceGroupManagerInstanceLifecyclePolicy' = proto.Field( + proto.MESSAGE, + number=447961617, + optional=True, + message='InstanceGroupManagerInstanceLifecyclePolicy', + ) + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + list_managed_instances_results: str = proto.Field( + proto.STRING, + number=296047156, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + named_ports: MutableSequence['NamedPort'] = proto.RepeatedField( + proto.MESSAGE, + number=427598732, + message='NamedPort', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + stateful_policy: 'StatefulPolicy' = proto.Field( + proto.MESSAGE, + number=47538565, + optional=True, + message='StatefulPolicy', + ) + status: 'InstanceGroupManagerStatus' = proto.Field( + proto.MESSAGE, + number=181260274, + optional=True, + message='InstanceGroupManagerStatus', + ) + target_pools: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=336072617, + ) + target_size: int = proto.Field( + proto.INT32, + number=62880239, + optional=True, + ) + update_policy: 'InstanceGroupManagerUpdatePolicy' = proto.Field( + proto.MESSAGE, + number=175809896, + optional=True, + message='InstanceGroupManagerUpdatePolicy', + ) + versions: MutableSequence['InstanceGroupManagerVersion'] = proto.RepeatedField( + proto.MESSAGE, + number=162430619, + message='InstanceGroupManagerVersion', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class InstanceGroupManagerActionsSummary(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + abandoning (int): + [Output Only] The total number of instances in the managed + instance group that are scheduled to be abandoned. + Abandoning an instance removes it from the managed instance + group without deleting it. + + This field is a member of `oneof`_ ``_abandoning``. + creating (int): + [Output Only] The number of instances in the managed + instance group that are scheduled to be created or are + currently being created. If the group fails to create any of + these instances, it tries again until it creates the + instance successfully. If you have disabled creation + retries, this field will not be populated; instead, the + creatingWithoutRetries field will be populated. + + This field is a member of `oneof`_ ``_creating``. + creating_without_retries (int): + [Output Only] The number of instances that the managed + instance group will attempt to create. The group attempts to + create each instance only once. If the group fails to create + any of these instances, it decreases the group's targetSize + value accordingly. + + This field is a member of `oneof`_ ``_creating_without_retries``. + deleting (int): + [Output Only] The number of instances in the managed + instance group that are scheduled to be deleted or are + currently being deleted. + + This field is a member of `oneof`_ ``_deleting``. + none (int): + [Output Only] The number of instances in the managed + instance group that are running and have no scheduled + actions. + + This field is a member of `oneof`_ ``_none``. + recreating (int): + [Output Only] The number of instances in the managed + instance group that are scheduled to be recreated or are + currently being being recreated. Recreating an instance + deletes the existing root persistent disk and creates a new + disk from the image that is defined in the instance + template. 
+ + This field is a member of `oneof`_ ``_recreating``. + refreshing (int): + [Output Only] The number of instances in the managed + instance group that are being reconfigured with properties + that do not require a restart or a recreate action. For + example, setting or removing target pools for the instance. + + This field is a member of `oneof`_ ``_refreshing``. + restarting (int): + [Output Only] The number of instances in the managed + instance group that are scheduled to be restarted or are + currently being restarted. + + This field is a member of `oneof`_ ``_restarting``. + resuming (int): + [Output Only] The number of instances in the managed + instance group that are scheduled to be resumed or are + currently being resumed. + + This field is a member of `oneof`_ ``_resuming``. + starting (int): + [Output Only] The number of instances in the managed + instance group that are scheduled to be started or are + currently being started. + + This field is a member of `oneof`_ ``_starting``. + stopping (int): + [Output Only] The number of instances in the managed + instance group that are scheduled to be stopped or are + currently being stopped. + + This field is a member of `oneof`_ ``_stopping``. + suspending (int): + [Output Only] The number of instances in the managed + instance group that are scheduled to be suspended or are + currently being suspended. + + This field is a member of `oneof`_ ``_suspending``. + verifying (int): + [Output Only] The number of instances in the managed + instance group that are being verified. See the + managedInstances[].currentAction property in the + listManagedInstances method documentation. + + This field is a member of `oneof`_ ``_verifying``. 
+ """ + + abandoning: int = proto.Field( + proto.INT32, + number=440023373, + optional=True, + ) + creating: int = proto.Field( + proto.INT32, + number=209809081, + optional=True, + ) + creating_without_retries: int = proto.Field( + proto.INT32, + number=369916745, + optional=True, + ) + deleting: int = proto.Field( + proto.INT32, + number=282846120, + optional=True, + ) + none: int = proto.Field( + proto.INT32, + number=3387192, + optional=True, + ) + recreating: int = proto.Field( + proto.INT32, + number=339057132, + optional=True, + ) + refreshing: int = proto.Field( + proto.INT32, + number=215044903, + optional=True, + ) + restarting: int = proto.Field( + proto.INT32, + number=372312947, + optional=True, + ) + resuming: int = proto.Field( + proto.INT32, + number=201100714, + optional=True, + ) + starting: int = proto.Field( + proto.INT32, + number=243064896, + optional=True, + ) + stopping: int = proto.Field( + proto.INT32, + number=105035892, + optional=True, + ) + suspending: int = proto.Field( + proto.INT32, + number=29113894, + optional=True, + ) + verifying: int = proto.Field( + proto.INT32, + number=451612873, + optional=True, + ) + + +class InstanceGroupManagerAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.InstanceGroupManagersScopedList]): + A list of InstanceGroupManagersScopedList + resources. + kind (str): + [Output Only] The resource type, which is always + compute#instanceGroupManagerAggregatedList for an aggregated + list of managed instance groups. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'InstanceGroupManagersScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='InstanceGroupManagersScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceGroupManagerAutoHealingPolicy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + The URL for the health check that signals + autohealing. + + This field is a member of `oneof`_ ``_health_check``. + initial_delay_sec (int): + The initial delay is the number of seconds + that a new VM takes to initialize and run its + startup script. 
During a VM's initial delay + period, the MIG ignores unsuccessful health + checks because the VM might be in the startup + process. This prevents the MIG from prematurely + recreating a VM. If the health check receives a + healthy response during the initial delay, it + indicates that the startup process is complete + and the VM is ready. The value of initial delay + must be between 0 and 3600 seconds. The default + value is 0. + + This field is a member of `oneof`_ ``_initial_delay_sec``. + """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + optional=True, + ) + initial_delay_sec: int = proto.Field( + proto.INT32, + number=263207002, + optional=True, + ) + + +class InstanceGroupManagerInstanceLifecyclePolicy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + force_update_on_repair (str): + A bit indicating whether to forcefully apply + the group's latest configuration when repairing + a VM. Valid options are: - NO (default): If + configuration updates are available, they are + not forcefully applied during repair. Instead, + configuration updates are applied according to + the group's update policy. - YES: If + configuration updates are available, they are + applied during repair. Check the + ForceUpdateOnRepair enum for the list of + possible values. + + This field is a member of `oneof`_ ``_force_update_on_repair``. + """ + class ForceUpdateOnRepair(proto.Enum): + r"""A bit indicating whether to forcefully apply the group's + latest configuration when repairing a VM. Valid options are: - + NO (default): If configuration updates are available, they are + not forcefully applied during repair. Instead, configuration + updates are applied according to the group's update policy. - + YES: If configuration updates are available, they are applied + during repair. 
+ + Values: + UNDEFINED_FORCE_UPDATE_ON_REPAIR (0): + A value indicating that the enum field is not + set. + NO (2497): + No description available. + YES (87751): + No description available. + """ + UNDEFINED_FORCE_UPDATE_ON_REPAIR = 0 + NO = 2497 + YES = 87751 + + force_update_on_repair: str = proto.Field( + proto.STRING, + number=356302027, + optional=True, + ) + + +class InstanceGroupManagerList(proto.Message): + r"""[Output Only] A list of managed instance groups. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceGroupManager]): + A list of InstanceGroupManager resources. + kind (str): + [Output Only] The resource type, which is always + compute#instanceGroupManagerList for a list of managed + instance groups. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InstanceGroupManager'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceGroupManager', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceGroupManagerStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler (str): + [Output Only] The URL of the Autoscaler that targets this + instance group manager. + + This field is a member of `oneof`_ ``_autoscaler``. + is_stable (bool): + [Output Only] A bit indicating whether the managed instance + group is in a stable state. A stable state means that: none + of the instances in the managed instance group is currently + undergoing any type of change (for example, creation, + restart, or deletion); no future changes are scheduled for + instances in the managed instance group; and the managed + instance group itself is not being modified. + + This field is a member of `oneof`_ ``_is_stable``. + stateful (google.cloud.compute_v1.types.InstanceGroupManagerStatusStateful): + [Output Only] Stateful status of the given Instance Group + Manager. + + This field is a member of `oneof`_ ``_stateful``. + version_target (google.cloud.compute_v1.types.InstanceGroupManagerStatusVersionTarget): + [Output Only] A status of consistency of Instances' versions + with their target version specified by version field on + Instance Group Manager. + + This field is a member of `oneof`_ ``_version_target``. 
+ """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + optional=True, + ) + is_stable: bool = proto.Field( + proto.BOOL, + number=108410864, + optional=True, + ) + stateful: 'InstanceGroupManagerStatusStateful' = proto.Field( + proto.MESSAGE, + number=244462412, + optional=True, + message='InstanceGroupManagerStatusStateful', + ) + version_target: 'InstanceGroupManagerStatusVersionTarget' = proto.Field( + proto.MESSAGE, + number=289386200, + optional=True, + message='InstanceGroupManagerStatusVersionTarget', + ) + + +class InstanceGroupManagerStatusStateful(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + has_stateful_config (bool): + [Output Only] A bit indicating whether the managed instance + group has stateful configuration, that is, if you have + configured any items in a stateful policy or in per-instance + configs. The group might report that it has no stateful + configuration even when there is still some preserved state + on a managed instance, for example, if you have deleted all + PICs but not yet applied those deletions. + + This field is a member of `oneof`_ ``_has_stateful_config``. + per_instance_configs (google.cloud.compute_v1.types.InstanceGroupManagerStatusStatefulPerInstanceConfigs): + [Output Only] Status of per-instance configurations on the + instance. + + This field is a member of `oneof`_ ``_per_instance_configs``. + """ + + has_stateful_config: bool = proto.Field( + proto.BOOL, + number=110474224, + optional=True, + ) + per_instance_configs: 'InstanceGroupManagerStatusStatefulPerInstanceConfigs' = proto.Field( + proto.MESSAGE, + number=526265001, + optional=True, + message='InstanceGroupManagerStatusStatefulPerInstanceConfigs', + ) + + +class InstanceGroupManagerStatusStatefulPerInstanceConfigs(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + all_effective (bool): + A bit indicating if all of the group's + per-instance configurations (listed in the + output of a listPerInstanceConfigs API call) + have status EFFECTIVE or there are no + per-instance-configs. + + This field is a member of `oneof`_ ``_all_effective``. + """ + + all_effective: bool = proto.Field( + proto.BOOL, + number=516540553, + optional=True, + ) + + +class InstanceGroupManagerStatusVersionTarget(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + is_reached (bool): + [Output Only] A bit indicating whether version target has + been reached in this managed instance group, i.e. all + instances are in their target version. Instances' target + version are specified by version field on Instance Group + Manager. + + This field is a member of `oneof`_ ``_is_reached``. + """ + + is_reached: bool = proto.Field( + proto.BOOL, + number=433209149, + optional=True, + ) + + +class InstanceGroupManagerUpdatePolicy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_redistribution_type (str): + The instance redistribution policy for + regional managed instance groups. Valid values + are: - PROACTIVE (default): The group attempts + to maintain an even distribution of VM instances + across zones in the region. - NONE: For + non-autoscaled groups, proactive redistribution + is disabled. Check the + InstanceRedistributionType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_instance_redistribution_type``. + max_surge (google.cloud.compute_v1.types.FixedOrPercent): + The maximum number of instances that can be + created above the specified targetSize during + the update process. 
This value can be either a + fixed number or, if the group has 10 or more + instances, a percentage. If you set a + percentage, the number of instances is rounded + if necessary. The default value for maxSurge is + a fixed value equal to the number of zones in + which the managed instance group operates. At + least one of either maxSurge or maxUnavailable + must be greater than 0. Learn more about + maxSurge. + + This field is a member of `oneof`_ ``_max_surge``. + max_unavailable (google.cloud.compute_v1.types.FixedOrPercent): + The maximum number of instances that can be + unavailable during the update process. An + instance is considered available if all of the + following conditions are satisfied: - The + instance's status is RUNNING. - If there is a + health check on the instance group, the + instance's health check status must be HEALTHY + at least once. If there is no health check on + the group, then the instance only needs to have + a status of RUNNING to be considered available. + This value can be either a fixed number or, if + the group has 10 or more instances, a + percentage. If you set a percentage, the number + of instances is rounded if necessary. The + default value for maxUnavailable is a fixed + value equal to the number of zones in which the + managed instance group operates. At least one of + either maxSurge or maxUnavailable must be + greater than 0. Learn more about maxUnavailable. + + This field is a member of `oneof`_ ``_max_unavailable``. + minimal_action (str): + Minimal action to be taken on an instance. + Use this option to minimize disruption as much + as possible or to apply a more disruptive action + than is necessary. - To limit disruption as much + as possible, set the minimal action to REFRESH. + If your update requires a more disruptive + action, Compute Engine performs the necessary + action to execute the update. - To apply a more + disruptive action than is strictly necessary, + set the minimal action to RESTART or REPLACE. 
+ For example, Compute Engine does not need to + restart a VM to change its metadata. But if your + application reads instance metadata only when a + VM is restarted, you can set the minimal action + to RESTART in order to pick up metadata changes. + Check the MinimalAction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_minimal_action``. + most_disruptive_allowed_action (str): + Most disruptive action that is allowed to be + taken on an instance. You can specify either + NONE to forbid any actions, REFRESH to avoid + restarting the VM and to limit disruption as + much as possible. RESTART to allow actions that + can be applied without instance replacing or + REPLACE to allow all possible actions. If the + Updater determines that the minimal update + action needed is more disruptive than most + disruptive allowed action you specify it will + not perform the update at all. Check the + MostDisruptiveAllowedAction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. + replacement_method (str): + What action should be used to replace instances. See + minimal_action.REPLACE Check the ReplacementMethod enum for + the list of possible values. + + This field is a member of `oneof`_ ``_replacement_method``. + type_ (str): + The type of update process. You can specify + either PROACTIVE so that the MIG automatically + updates VMs to the latest configurations or + OPPORTUNISTIC so that you can select the VMs + that you want to update. Check the Type enum for + the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class InstanceRedistributionType(proto.Enum): + r"""The instance redistribution policy for regional managed + instance groups. Valid values are: - PROACTIVE (default): The + group attempts to maintain an even distribution of VM instances + across zones in the region. 
- NONE: For non-autoscaled groups, + proactive redistribution is disabled. Additional supported + values which may be not listed in the enum directly due to + technical reasons: + + NONE + PROACTIVE + + Values: + UNDEFINED_INSTANCE_REDISTRIBUTION_TYPE (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_INSTANCE_REDISTRIBUTION_TYPE = 0 + + class MinimalAction(proto.Enum): + r"""Minimal action to be taken on an instance. Use this option to + minimize disruption as much as possible or to apply a more + disruptive action than is necessary. - To limit disruption as + much as possible, set the minimal action to REFRESH. If your + update requires a more disruptive action, Compute Engine + performs the necessary action to execute the update. - To apply + a more disruptive action than is strictly necessary, set the + minimal action to RESTART or REPLACE. For example, Compute + Engine does not need to restart a VM to change its metadata. But + if your application reads instance metadata only when a VM is + restarted, you can set the minimal action to RESTART in order to + pick up metadata changes. Additional supported values which may + be not listed in the enum directly due to technical reasons: + + NONE + REFRESH + REPLACE + RESTART + + Values: + UNDEFINED_MINIMAL_ACTION (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_MINIMAL_ACTION = 0 + + class MostDisruptiveAllowedAction(proto.Enum): + r"""Most disruptive action that is allowed to be taken on an + instance. You can specify either NONE to forbid any actions, + REFRESH to avoid restarting the VM and to limit disruption as + much as possible. RESTART to allow actions that can be applied + without instance replacing or REPLACE to allow all possible + actions. If the Updater determines that the minimal update + action needed is more disruptive than most disruptive allowed + action you specify it will not perform the update at all. 
+ Additional supported values which may be not listed in the enum + directly due to technical reasons: + + NONE + REFRESH + REPLACE + RESTART + + Values: + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION = 0 + + class ReplacementMethod(proto.Enum): + r"""What action should be used to replace instances. See + minimal_action.REPLACE + + Values: + UNDEFINED_REPLACEMENT_METHOD (0): + A value indicating that the enum field is not + set. + RECREATE (522644719): + Instances will be recreated (with the same + name) + SUBSTITUTE (280924314): + Default option: instances will be deleted and + created (with a new name) + """ + UNDEFINED_REPLACEMENT_METHOD = 0 + RECREATE = 522644719 + SUBSTITUTE = 280924314 + + class Type(proto.Enum): + r"""The type of update process. You can specify either PROACTIVE + so that the MIG automatically updates VMs to the latest + configurations or OPPORTUNISTIC so that you can select the VMs + that you want to update. Additional supported values which may + be not listed in the enum directly due to technical reasons: + + PROACTIVE + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + OPPORTUNISTIC (429530089): + MIG will apply new configurations to existing + VMs only when you selectively target specific or + all VMs to be updated. 
+ """ + UNDEFINED_TYPE = 0 + OPPORTUNISTIC = 429530089 + + instance_redistribution_type: str = proto.Field( + proto.STRING, + number=292630424, + optional=True, + ) + max_surge: 'FixedOrPercent' = proto.Field( + proto.MESSAGE, + number=302572691, + optional=True, + message='FixedOrPercent', + ) + max_unavailable: 'FixedOrPercent' = proto.Field( + proto.MESSAGE, + number=404940277, + optional=True, + message='FixedOrPercent', + ) + minimal_action: str = proto.Field( + proto.STRING, + number=270567060, + optional=True, + ) + most_disruptive_allowed_action: str = proto.Field( + proto.STRING, + number=66103053, + optional=True, + ) + replacement_method: str = proto.Field( + proto.STRING, + number=505931694, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class InstanceGroupManagerVersion(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_template (str): + The URL of the instance template that is specified for this + managed instance group. The group uses this template to + create new instances in the managed instance group until the + ``targetSize`` for this version is reached. The templates + for existing instances in the group do not change unless you + run recreateInstances, run applyUpdatesToInstances, or set + the group's updatePolicy.type to PROACTIVE; in those cases, + existing instances are updated until the ``targetSize`` for + this version is reached. + + This field is a member of `oneof`_ ``_instance_template``. + name (str): + Name of the version. Unique among all + versions in the scope of this managed instance + group. + + This field is a member of `oneof`_ ``_name``. + target_size (google.cloud.compute_v1.types.FixedOrPercent): + Specifies the intended number of instances to be created + from the instanceTemplate. 
The final number of instances + created from the template will be equal to: - If expressed + as a fixed number, the minimum of either targetSize.fixed or + instanceGroupManager.targetSize is used. - if expressed as a + percent, the targetSize would be (targetSize.percent/100 \* + InstanceGroupManager.targetSize) If there is a remainder, + the number is rounded. If unset, this version will update + any remaining instances not updated by another version. Read + Starting a canary update for more information. + + This field is a member of `oneof`_ ``_target_size``. + """ + + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + target_size: 'FixedOrPercent' = proto.Field( + proto.MESSAGE, + number=62880239, + optional=True, + message='FixedOrPercent', + ) + + +class InstanceGroupManagersAbandonInstancesRequest(proto.Message): + r""" + + Attributes: + instances (MutableSequence[str]): + The URLs of one or more instances to abandon. This can be a + full URL or a partial URL, such as + zones/[ZONE]/instances/[INSTANCE_NAME]. + """ + + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + + +class InstanceGroupManagersApplyUpdatesRequest(proto.Message): + r"""InstanceGroupManagers.applyUpdatesToInstances + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + all_instances (bool): + Flag to update all instances instead of + specified list of “instances”. If the flag is + set to true then the instances may not be + specified in the request. + + This field is a member of `oneof`_ ``_all_instances``. + instances (MutableSequence[str]): + The list of URLs of one or more instances for which you want + to apply updates. Each URL can be a full URL or a partial + URL, such as zones/[ZONE]/instances/[INSTANCE_NAME]. 
+ minimal_action (str): + The minimal action that you want to perform + on each instance during the update: - REPLACE: + At minimum, delete the instance and create it + again. - RESTART: Stop the instance and start it + again. - REFRESH: Do not stop the instance and + limit disruption as much as possible. - NONE: Do + not disrupt the instance at all. By default, the + minimum action is NONE. If your update requires + a more disruptive action than you set with this + flag, the necessary action is performed to + execute the update. Check the MinimalAction enum + for the list of possible values. + + This field is a member of `oneof`_ ``_minimal_action``. + most_disruptive_allowed_action (str): + The most disruptive action that you want to + perform on each instance during the update: - + REPLACE: Delete the instance and create it + again. - RESTART: Stop the instance and start it + again. - REFRESH: Do not stop the instance and + limit disruption as much as possible. - NONE: Do + not disrupt the instance at all. By default, the + most disruptive allowed action is REPLACE. If + your update requires a more disruptive action + than you set with this flag, the update request + will fail. Check the MostDisruptiveAllowedAction + enum for the list of possible values. + + This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. + """ + class MinimalAction(proto.Enum): + r"""The minimal action that you want to perform on each instance + during the update: - REPLACE: At minimum, delete the instance + and create it again. - RESTART: Stop the instance and start it + again. - REFRESH: Do not stop the instance and limit disruption + as much as possible. - NONE: Do not disrupt the instance at all. + By default, the minimum action is NONE. If your update requires + a more disruptive action than you set with this flag, the + necessary action is performed to execute the update. 
Additional + supported values which may be not listed in the enum directly + due to technical reasons: + + NONE + REFRESH + REPLACE + RESTART + + Values: + UNDEFINED_MINIMAL_ACTION (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_MINIMAL_ACTION = 0 + + class MostDisruptiveAllowedAction(proto.Enum): + r"""The most disruptive action that you want to perform on each + instance during the update: - REPLACE: Delete the instance and + create it again. - RESTART: Stop the instance and start it + again. - REFRESH: Do not stop the instance and limit disruption + as much as possible. - NONE: Do not disrupt the instance at all. + By default, the most disruptive allowed action is REPLACE. If + your update requires a more disruptive action than you set with + this flag, the update request will fail. Additional supported + values which may be not listed in the enum directly due to + technical reasons: + + NONE + REFRESH + REPLACE + RESTART + + Values: + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION = 0 + + all_instances: bool = proto.Field( + proto.BOOL, + number=403676512, + optional=True, + ) + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + minimal_action: str = proto.Field( + proto.STRING, + number=270567060, + optional=True, + ) + most_disruptive_allowed_action: str = proto.Field( + proto.STRING, + number=66103053, + optional=True, + ) + + +class InstanceGroupManagersCreateInstancesRequest(proto.Message): + r"""InstanceGroupManagers.createInstances + + Attributes: + instances (MutableSequence[google.cloud.compute_v1.types.PerInstanceConfig]): + [Required] List of specifications of per-instance configs. 
+ """ + + instances: MutableSequence['PerInstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=29097598, + message='PerInstanceConfig', + ) + + +class InstanceGroupManagersDeleteInstancesRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instances (MutableSequence[str]): + The URLs of one or more instances to delete. This can be a + full URL or a partial URL, such as + zones/[ZONE]/instances/[INSTANCE_NAME]. Queued instances do + not have URL and can be deleted only by name. One cannot + specify both URLs and names in a single request. + skip_instances_on_validation_error (bool): + Specifies whether the request should proceed despite the + inclusion of instances that are not members of the group or + that are already in the process of being deleted or + abandoned. If this field is set to ``false`` and such an + instance is specified in the request, the operation fails. + The operation always fails if the request contains a + malformed instance URL or a reference to an instance that + exists in a zone or region other than the group's zone or + region. + + This field is a member of `oneof`_ ``_skip_instances_on_validation_error``. + """ + + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + skip_instances_on_validation_error: bool = proto.Field( + proto.BOOL, + number=40631073, + optional=True, + ) + + +class InstanceGroupManagersDeletePerInstanceConfigsReq(proto.Message): + r"""InstanceGroupManagers.deletePerInstanceConfigs + + Attributes: + names (MutableSequence[str]): + The list of instance names for which we want + to delete per-instance configs on this managed + instance group. + """ + + names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=104585032, + ) + + +class InstanceGroupManagersListErrorsResponse(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + items (MutableSequence[google.cloud.compute_v1.types.InstanceManagedByIgmError]): + [Output Only] The list of errors of the managed instance + group. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + """ + + @property + def raw_page(self): + return self + + items: MutableSequence['InstanceManagedByIgmError'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceManagedByIgmError', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + + +class InstanceGroupManagersListManagedInstancesResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + managed_instances (MutableSequence[google.cloud.compute_v1.types.ManagedInstance]): + [Output Only] The list of instances in the managed instance + group. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ """ + + @property + def raw_page(self): + return self + + managed_instances: MutableSequence['ManagedInstance'] = proto.RepeatedField( + proto.MESSAGE, + number=336219614, + message='ManagedInstance', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + + +class InstanceGroupManagersListPerInstanceConfigsResp(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + items (MutableSequence[google.cloud.compute_v1.types.PerInstanceConfig]): + [Output Only] The list of PerInstanceConfig. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + items: MutableSequence['PerInstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='PerInstanceConfig', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceGroupManagersPatchPerInstanceConfigsReq(proto.Message): + r"""InstanceGroupManagers.patchPerInstanceConfigs + + Attributes: + per_instance_configs (MutableSequence[google.cloud.compute_v1.types.PerInstanceConfig]): + The list of per-instance configurations to + insert or patch on this managed instance group. 
+ """ + + per_instance_configs: MutableSequence['PerInstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=526265001, + message='PerInstanceConfig', + ) + + +class InstanceGroupManagersRecreateInstancesRequest(proto.Message): + r""" + + Attributes: + instances (MutableSequence[str]): + The URLs of one or more instances to recreate. This can be a + full URL or a partial URL, such as + zones/[ZONE]/instances/[INSTANCE_NAME]. + """ + + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + + +class InstanceGroupManagersScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_managers (MutableSequence[google.cloud.compute_v1.types.InstanceGroupManager]): + [Output Only] The list of managed instance groups that are + contained in the specified project and zone. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] The warning that replaces the list of managed + instance groups when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + instance_group_managers: MutableSequence['InstanceGroupManager'] = proto.RepeatedField( + proto.MESSAGE, + number=214072592, + message='InstanceGroupManager', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceGroupManagersSetInstanceTemplateRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_template (str): + The URL of the instance template that is + specified for this managed instance group. The + group uses this template to create all new + instances in the managed instance group. 
The + templates for existing instances in the group do + not change unless you run recreateInstances, run + applyUpdatesToInstances, or set the group's + updatePolicy.type to PROACTIVE. + + This field is a member of `oneof`_ ``_instance_template``. + """ + + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + optional=True, + ) + + +class InstanceGroupManagersSetTargetPoolsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fingerprint (str): + The fingerprint of the target pools + information. Use this optional property to + prevent conflicts when multiple users change the + target pools settings concurrently. Obtain the + fingerprint with the instanceGroupManagers.get + method. Then, include the fingerprint in your + request to ensure that you do not overwrite + changes that were applied from another + concurrent request. + + This field is a member of `oneof`_ ``_fingerprint``. + target_pools (MutableSequence[str]): + The list of target pool URLs that instances + in this managed instance group belong to. The + managed instance group applies these target + pools to all of the instances in the group. + Existing instances and new instances in the + group all receive these target pool settings. + """ + + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + target_pools: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=336072617, + ) + + +class InstanceGroupManagersUpdatePerInstanceConfigsReq(proto.Message): + r"""InstanceGroupManagers.updatePerInstanceConfigs + + Attributes: + per_instance_configs (MutableSequence[google.cloud.compute_v1.types.PerInstanceConfig]): + The list of per-instance configurations to + insert or patch on this managed instance group. 
+ """ + + per_instance_configs: MutableSequence['PerInstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=526265001, + message='PerInstanceConfig', + ) + + +class InstanceGroupsAddInstancesRequest(proto.Message): + r""" + + Attributes: + instances (MutableSequence[google.cloud.compute_v1.types.InstanceReference]): + The list of instances to add to the instance + group. + """ + + instances: MutableSequence['InstanceReference'] = proto.RepeatedField( + proto.MESSAGE, + number=29097598, + message='InstanceReference', + ) + + +class InstanceGroupsListInstances(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceWithNamedPorts]): + A list of InstanceWithNamedPorts resources. + kind (str): + [Output Only] The resource type, which is always + compute#instanceGroupsListInstances for the list of + instances in the specified instance group. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InstanceWithNamedPorts'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceWithNamedPorts', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceGroupsListInstancesRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_state (str): + A filter for the state of the instances in + the instance group. Valid options are ALL or + RUNNING. If you do not specify this parameter + the list includes all instances regardless of + their state. Check the InstanceState enum for + the list of possible values. + + This field is a member of `oneof`_ ``_instance_state``. + """ + class InstanceState(proto.Enum): + r"""A filter for the state of the instances in the instance + group. Valid options are ALL or RUNNING. If you do not specify + this parameter the list includes all instances regardless of + their state. + + Values: + UNDEFINED_INSTANCE_STATE (0): + A value indicating that the enum field is not + set. + ALL (64897): + Includes all instances in the generated list + regardless of their state. + RUNNING (121282975): + Includes instances in the generated list only + if they have a RUNNING state. 
+ """ + UNDEFINED_INSTANCE_STATE = 0 + ALL = 64897 + RUNNING = 121282975 + + instance_state: str = proto.Field( + proto.STRING, + number=92223591, + optional=True, + ) + + +class InstanceGroupsRemoveInstancesRequest(proto.Message): + r""" + + Attributes: + instances (MutableSequence[google.cloud.compute_v1.types.InstanceReference]): + The list of instances to remove from the + instance group. + """ + + instances: MutableSequence['InstanceReference'] = proto.RepeatedField( + proto.MESSAGE, + number=29097598, + message='InstanceReference', + ) + + +class InstanceGroupsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_groups (MutableSequence[google.cloud.compute_v1.types.InstanceGroup]): + [Output Only] The list of instance groups that are contained + in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] An informational warning that replaces the + list of instance groups when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + instance_groups: MutableSequence['InstanceGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=366469310, + message='InstanceGroup', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceGroupsSetNamedPortsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fingerprint (str): + The fingerprint of the named ports + information for this instance group. Use this + optional property to prevent conflicts when + multiple users change the named ports settings + concurrently. Obtain the fingerprint with the + instanceGroups.get method. Then, include the + fingerprint in your request to ensure that you + do not overwrite changes that were applied from + another concurrent request. 
A request with an + incorrect fingerprint will fail with error 412 + conditionNotMet. + + This field is a member of `oneof`_ ``_fingerprint``. + named_ports (MutableSequence[google.cloud.compute_v1.types.NamedPort]): + The list of named ports to set for this + instance group. + """ + + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + named_ports: MutableSequence['NamedPort'] = proto.RepeatedField( + proto.MESSAGE, + number=427598732, + message='NamedPort', + ) + + +class InstanceList(proto.Message): + r"""Contains a list of instances. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Instance]): + A list of Instance resources. + kind (str): + [Output Only] Type of resource. Always compute#instanceList + for lists of Instance resources. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Instance'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Instance', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceListReferrers(proto.Message): + r"""Contains a list of instance referrers. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Reference]): + A list of Reference resources. + kind (str): + [Output Only] Type of resource. Always + compute#instanceListReferrers for lists of Instance + referrers. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Reference'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Reference', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceManagedByIgmError(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + error (google.cloud.compute_v1.types.InstanceManagedByIgmErrorManagedInstanceError): + [Output Only] Contents of the error. + + This field is a member of `oneof`_ ``_error``. + instance_action_details (google.cloud.compute_v1.types.InstanceManagedByIgmErrorInstanceActionDetails): + [Output Only] Details of the instance action that triggered + this error. May be null, if the error was not caused by an + action on an instance. This field is optional. + + This field is a member of `oneof`_ ``_instance_action_details``. + timestamp (str): + [Output Only] The time that this error occurred. This value + is in RFC3339 text format. + + This field is a member of `oneof`_ ``_timestamp``. 
+ """ + + error: 'InstanceManagedByIgmErrorManagedInstanceError' = proto.Field( + proto.MESSAGE, + number=96784904, + optional=True, + message='InstanceManagedByIgmErrorManagedInstanceError', + ) + instance_action_details: 'InstanceManagedByIgmErrorInstanceActionDetails' = proto.Field( + proto.MESSAGE, + number=292224547, + optional=True, + message='InstanceManagedByIgmErrorInstanceActionDetails', + ) + timestamp: str = proto.Field( + proto.STRING, + number=55126294, + optional=True, + ) + + +class InstanceManagedByIgmErrorInstanceActionDetails(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + action (str): + [Output Only] Action that managed instance group was + executing on the instance when the error occurred. Possible + values: Check the Action enum for the list of possible + values. + + This field is a member of `oneof`_ ``_action``. + instance (str): + [Output Only] The URL of the instance. The URL can be set + even if the instance has not yet been created. + + This field is a member of `oneof`_ ``_instance``. + version (google.cloud.compute_v1.types.ManagedInstanceVersion): + [Output Only] Version this instance was created from, or was + being created from, but the creation failed. Corresponds to + one of the versions that were set on the Instance Group + Manager resource at the time this instance was being + created. + + This field is a member of `oneof`_ ``_version``. + """ + class Action(proto.Enum): + r"""[Output Only] Action that managed instance group was executing on + the instance when the error occurred. Possible values: + + Values: + UNDEFINED_ACTION (0): + A value indicating that the enum field is not + set. + ABANDONING (388244813): + The managed instance group is abandoning this + instance. The instance will be removed from the + instance group and from any target pools that + are associated with this group. 
+ CREATING (455564985): + The managed instance group is creating this + instance. If the group fails to create this + instance, it will try again until it is + successful. + CREATING_WITHOUT_RETRIES (428843785): + The managed instance group is attempting to + create this instance only once. If the group + fails to create this instance, it does not try + again and the group's targetSize value is + decreased. + DELETING (528602024): + The managed instance group is permanently + deleting this instance. + NONE (2402104): + The managed instance group has not scheduled + any actions for this instance. + RECREATING (287278572): + The managed instance group is recreating this + instance. + REFRESHING (163266343): + The managed instance group is applying + configuration changes to the instance without + stopping it. For example, the group can update + the target pool list for an instance without + stopping that instance. + RESTARTING (320534387): + The managed instance group is restarting this + instance. + RESUMING (446856618): + The managed instance group is resuming this + instance. + STARTING (488820800): + The managed instance group is starting this + instance. + STOPPING (350791796): + The managed instance group is stopping this + instance. + SUSPENDING (514206246): + The managed instance group is suspending this + instance. + VERIFYING (16982185): + The managed instance group is verifying this + already created instance. Verification happens + every time the instance is (re)created or + restarted and consists of: 1. Waiting until + health check specified as part of this managed + instance group's autohealing policy reports + HEALTHY. Note: Applies only if autohealing + policy has a health check specified 2. Waiting + for addition verification steps performed as + post-instance creation (subject to future + extensions). 
+ """ + UNDEFINED_ACTION = 0 + ABANDONING = 388244813 + CREATING = 455564985 + CREATING_WITHOUT_RETRIES = 428843785 + DELETING = 528602024 + NONE = 2402104 + RECREATING = 287278572 + REFRESHING = 163266343 + RESTARTING = 320534387 + RESUMING = 446856618 + STARTING = 488820800 + STOPPING = 350791796 + SUSPENDING = 514206246 + VERIFYING = 16982185 + + action: str = proto.Field( + proto.STRING, + number=187661878, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + optional=True, + ) + version: 'ManagedInstanceVersion' = proto.Field( + proto.MESSAGE, + number=351608024, + optional=True, + message='ManagedInstanceVersion', + ) + + +class InstanceManagedByIgmErrorManagedInstanceError(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (str): + [Output Only] Error code. + + This field is a member of `oneof`_ ``_code``. + message (str): + [Output Only] Error message. + + This field is a member of `oneof`_ ``_message``. + """ + + code: str = proto.Field( + proto.STRING, + number=3059181, + optional=True, + ) + message: str = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + + +class InstanceMoveRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + destination_zone (str): + The URL of the destination zone to move the + instance. This can be a full or partial URL. For + example, the following are all valid URLs to a + zone: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + - projects/project/zones/zone - zones/zone + + This field is a member of `oneof`_ ``_destination_zone``. + target_instance (str): + The URL of the target instance to move. This + can be a full or partial URL. 
For example, the + following are all valid URLs to an instance: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /instances/instance - + projects/project/zones/zone/instances/instance - + zones/zone/instances/instance + + This field is a member of `oneof`_ ``_target_instance``. + """ + + destination_zone: str = proto.Field( + proto.STRING, + number=131854653, + optional=True, + ) + target_instance: str = proto.Field( + proto.STRING, + number=289769347, + optional=True, + ) + + +class InstanceParams(proto.Message): + r"""Additional instance params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Resource manager tags to be bound to the instance. Tag keys + and values have the same definition as resource manager + tags. Keys must be in the format ``tagKeys/{tag_key_id}``, + and values are in the format ``tagValues/456``. The field is + ignored (both PUT & PATCH) when empty. + """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=377671164, + ) + + +class InstanceProperties(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + advanced_machine_features (google.cloud.compute_v1.types.AdvancedMachineFeatures): + Controls for advanced machine-related + behavior features. Note that for MachineImage, + this is not supported yet. + + This field is a member of `oneof`_ ``_advanced_machine_features``. + can_ip_forward (bool): + Enables instances created based on these + properties to send packets with source IP + addresses other than their own and receive + packets with destination IP addresses other than + their own. If these instances will be used as an + IP gateway or it will be set as the next-hop in + a Route resource, specify true. If unsure, leave + this set to false. See the Enable IP forwarding + documentation for more information. 
+ + This field is a member of `oneof`_ ``_can_ip_forward``. + confidential_instance_config (google.cloud.compute_v1.types.ConfidentialInstanceConfig): + Specifies the Confidential Instance options. + Note that for MachineImage, this is not + supported yet. + + This field is a member of `oneof`_ ``_confidential_instance_config``. + description (str): + An optional text description for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_description``. + disks (MutableSequence[google.cloud.compute_v1.types.AttachedDisk]): + An array of disks that are associated with + the instances that are created from these + properties. + guest_accelerators (MutableSequence[google.cloud.compute_v1.types.AcceleratorConfig]): + A list of guest accelerator cards' type and + count to use for instances created from these + properties. + key_revocation_action_type (str): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + Check the KeyRevocationActionType enum for the + list of possible values. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + labels (MutableMapping[str, str]): + Labels to apply to instances that are created + from these properties. + machine_type (str): + The machine type to use for instances that + are created from these properties. + + This field is a member of `oneof`_ ``_machine_type``. + metadata (google.cloud.compute_v1.types.Metadata): + The metadata key/value pairs to assign to + instances that are created from these + properties. These pairs can consist of custom + metadata or predefined keys. See Project and + instance metadata for more information. + + This field is a member of `oneof`_ ``_metadata``. + min_cpu_platform (str): + Minimum cpu/platform to be used by instances. + The instance may be scheduled on the specified + or newer cpu/platform. 
Applicable values are the + friendly names of CPU platforms, such as + minCpuPlatform: "Intel Haswell" or + minCpuPlatform: "Intel Sandy Bridge". For more + information, read Specifying a Minimum CPU + Platform. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + network_interfaces (MutableSequence[google.cloud.compute_v1.types.NetworkInterface]): + An array of network access configurations for + this interface. + network_performance_config (google.cloud.compute_v1.types.NetworkPerformanceConfig): + Note that for MachineImage, this is not + supported yet. + + This field is a member of `oneof`_ ``_network_performance_config``. + private_ipv6_google_access (str): + The private IPv6 google access type for VMs. If not + specified, use INHERIT_FROM_SUBNETWORK as default. Note that + for MachineImage, this is not supported yet. Check the + PrivateIpv6GoogleAccess enum for the list of possible + values. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. + reservation_affinity (google.cloud.compute_v1.types.ReservationAffinity): + Specifies the reservations that instances can + consume from. Note that for MachineImage, this + is not supported yet. + + This field is a member of `oneof`_ ``_reservation_affinity``. + resource_manager_tags (MutableMapping[str, str]): + Resource manager tags to be bound to the instance. Tag keys + and values have the same definition as resource manager + tags. Keys must be in the format ``tagKeys/{tag_key_id}``, + and values are in the format ``tagValues/456``. The field is + ignored (both PUT & PATCH) when empty. + resource_policies (MutableSequence[str]): + Resource policies (names, not URLs) applied + to instances created from these properties. Note + that for MachineImage, this is not supported + yet. + scheduling (google.cloud.compute_v1.types.Scheduling): + Specifies the scheduling options for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_scheduling``. 
+ service_accounts (MutableSequence[google.cloud.compute_v1.types.ServiceAccount]): + A list of service accounts with specified + scopes. Access tokens for these service accounts + are available to the instances that are created + from these properties. Use metadata queries to + obtain the access tokens for these instances. + shielded_instance_config (google.cloud.compute_v1.types.ShieldedInstanceConfig): + Note that for MachineImage, this is not + supported yet. + + This field is a member of `oneof`_ ``_shielded_instance_config``. + tags (google.cloud.compute_v1.types.Tags): + A list of tags to apply to the instances that + are created from these properties. The tags + identify valid sources or targets for network + firewalls. The setTags method can modify this + list of tags. Each tag within the list must + comply with RFC1035. + + This field is a member of `oneof`_ ``_tags``. + """ + class KeyRevocationActionType(proto.Enum): + r"""KeyRevocationActionType of the instance. Supported options + are "STOP" and "NONE". The default value is "NONE" if it is not + specified. + + Values: + UNDEFINED_KEY_REVOCATION_ACTION_TYPE (0): + A value indicating that the enum field is not + set. + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED (467110106): + Default value. This value is unused. + NONE (2402104): + Indicates user chose no operation. + STOP (2555906): + Indicates user chose to opt for VM shutdown + on key revocation. + """ + UNDEFINED_KEY_REVOCATION_ACTION_TYPE = 0 + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 467110106 + NONE = 2402104 + STOP = 2555906 + + class PrivateIpv6GoogleAccess(proto.Enum): + r"""The private IPv6 google access type for VMs. If not specified, use + INHERIT_FROM_SUBNETWORK as default. Note that for MachineImage, this + is not supported yet. + + Values: + UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS (0): + A value indicating that the enum field is not + set. 
+ ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (427975994): + Bidirectional private IPv6 access to/from + Google services. If specified, the subnetwork + who is attached to the instance's default + network interface will be assigned an internal + IPv6 prefix if it doesn't have before. + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (288210263): + Outbound private IPv6 access from VMs in this + subnet to Google services. If specified, the + subnetwork who is attached to the instance's + default network interface will be assigned an + internal IPv6 prefix if it doesn't have before. + INHERIT_FROM_SUBNETWORK (530256959): + Each network interface inherits + PrivateIpv6GoogleAccess from its subnetwork. + """ + UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS = 0 + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 427975994 + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 288210263 + INHERIT_FROM_SUBNETWORK = 530256959 + + advanced_machine_features: 'AdvancedMachineFeatures' = proto.Field( + proto.MESSAGE, + number=409646002, + optional=True, + message='AdvancedMachineFeatures', + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=467731324, + optional=True, + ) + confidential_instance_config: 'ConfidentialInstanceConfig' = proto.Field( + proto.MESSAGE, + number=490637685, + optional=True, + message='ConfidentialInstanceConfig', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disks: MutableSequence['AttachedDisk'] = proto.RepeatedField( + proto.MESSAGE, + number=95594102, + message='AttachedDisk', + ) + guest_accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=463595119, + message='AcceleratorConfig', + ) + key_revocation_action_type: str = proto.Field( + proto.STRING, + number=235941474, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + machine_type: str = proto.Field( + proto.STRING, + number=227711026, + optional=True, + ) + 
metadata: 'Metadata' = proto.Field( + proto.MESSAGE, + number=86866735, + optional=True, + message='Metadata', + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=242912759, + optional=True, + ) + network_interfaces: MutableSequence['NetworkInterface'] = proto.RepeatedField( + proto.MESSAGE, + number=52735243, + message='NetworkInterface', + ) + network_performance_config: 'NetworkPerformanceConfig' = proto.Field( + proto.MESSAGE, + number=398330850, + optional=True, + message='NetworkPerformanceConfig', + ) + private_ipv6_google_access: str = proto.Field( + proto.STRING, + number=48277006, + optional=True, + ) + reservation_affinity: 'ReservationAffinity' = proto.Field( + proto.MESSAGE, + number=157850683, + optional=True, + message='ReservationAffinity', + ) + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=377671164, + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + scheduling: 'Scheduling' = proto.Field( + proto.MESSAGE, + number=386688404, + optional=True, + message='Scheduling', + ) + service_accounts: MutableSequence['ServiceAccount'] = proto.RepeatedField( + proto.MESSAGE, + number=277537328, + message='ServiceAccount', + ) + shielded_instance_config: 'ShieldedInstanceConfig' = proto.Field( + proto.MESSAGE, + number=12862901, + optional=True, + message='ShieldedInstanceConfig', + ) + tags: 'Tags' = proto.Field( + proto.MESSAGE, + number=3552281, + optional=True, + message='Tags', + ) + + +class InstanceReference(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + The URL for a specific instance. @required + compute.instancegroups.addInstances/removeInstances + + This field is a member of `oneof`_ ``_instance``. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + optional=True, + ) + + +class InstanceTemplate(proto.Message): + r"""Represents an Instance Template resource. You can use + instance templates to create VM instances and managed instance + groups. For more information, read Instance Templates. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] The creation timestamp for this instance + template in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] A unique identifier for this instance + template. The server defines this identifier. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] The resource type, which is always + compute#instanceTemplate for instance templates. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource; provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + properties (google.cloud.compute_v1.types.InstanceProperties): + The instance properties for this instance + template. + + This field is a member of `oneof`_ ``_properties``. + region (str): + [Output Only] URL of the region where the instance template + resides. Only applicable for regional resources. 
+ + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] The URL for this instance template. The server + defines this URL. + + This field is a member of `oneof`_ ``_self_link``. + source_instance (str): + The source instance used to create the + template. You can provide this as a partial or + full URL to the resource. For example, the + following are valid values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /instances/instance - + projects/project/zones/zone/instances/instance + + This field is a member of `oneof`_ ``_source_instance``. + source_instance_params (google.cloud.compute_v1.types.SourceInstanceParams): + The source instance params to use to create + this instance template. + + This field is a member of `oneof`_ ``_source_instance_params``. + """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + properties: 'InstanceProperties' = proto.Field( + proto.MESSAGE, + number=147688755, + optional=True, + message='InstanceProperties', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + source_instance: str = proto.Field( + proto.STRING, + number=396315705, + optional=True, + ) + source_instance_params: 'SourceInstanceParams' = proto.Field( + proto.MESSAGE, + number=135342156, + optional=True, + message='SourceInstanceParams', + ) + + +class InstanceTemplateAggregatedList(proto.Message): + r"""Contains a list of InstanceTemplatesScopedList. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.InstanceTemplatesScopedList]): + A list of InstanceTemplatesScopedList + resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'InstanceTemplatesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='InstanceTemplatesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceTemplateList(proto.Message): + r"""A list of instance templates. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceTemplate]): + A list of InstanceTemplate resources. + kind (str): + [Output Only] The resource type, which is always + compute#instanceTemplatesListResponse for instance template + lists. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InstanceTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceTemplate', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceTemplatesScopedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_templates (MutableSequence[google.cloud.compute_v1.types.InstanceTemplate]): + [Output Only] A list of instance templates that are + contained within the specified project and zone. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] An informational warning that replaces the + list of instance templates when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + instance_templates: MutableSequence['InstanceTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=459889679, + message='InstanceTemplate', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstanceWithNamedPorts(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + [Output Only] The URL of the instance. + + This field is a member of `oneof`_ ``_instance``. + named_ports (MutableSequence[google.cloud.compute_v1.types.NamedPort]): + [Output Only] The named ports that belong to this instance + group. + status (str): + [Output Only] The status of the instance. Check the Status + enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + """ + class Status(proto.Enum): + r"""[Output Only] The status of the instance. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + DEPROVISIONING (428935662): + The instance is halted and we are performing + tear down tasks like network deprogramming, + releasing quota, IP, tearing down disks etc. + PROVISIONING (290896621): + Resources are being allocated for the + instance. + REPAIRING (413483285): + The instance is in repair. + RUNNING (121282975): + The instance is running. 
+ STAGING (431072283): + All required resources have been allocated + and the instance is being started. + STOPPED (444276141): + The instance has stopped successfully. + STOPPING (350791796): + The instance is currently stopping (either + being deleted or killed). + SUSPENDED (51223995): + The instance has suspended. + SUSPENDING (514206246): + The instance is suspending. + TERMINATED (250018339): + The instance has stopped (either by explicit + action or underlying failure). + """ + UNDEFINED_STATUS = 0 + DEPROVISIONING = 428935662 + PROVISIONING = 290896621 + REPAIRING = 413483285 + RUNNING = 121282975 + STAGING = 431072283 + STOPPED = 444276141 + STOPPING = 350791796 + SUSPENDED = 51223995 + SUSPENDING = 514206246 + TERMINATED = 250018339 + + instance: str = proto.Field( + proto.STRING, + number=18257045, + optional=True, + ) + named_ports: MutableSequence['NamedPort'] = proto.RepeatedField( + proto.MESSAGE, + number=427598732, + message='NamedPort', + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class InstancesAddResourcePoliciesRequest(proto.Message): + r""" + + Attributes: + resource_policies (MutableSequence[str]): + Resource policies to be added to this + instance. + """ + + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + + +class InstancesGetEffectiveFirewallsResponse(proto.Message): + r""" + + Attributes: + firewall_policys (MutableSequence[google.cloud.compute_v1.types.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy]): + Effective firewalls from firewall policies. + firewalls (MutableSequence[google.cloud.compute_v1.types.Firewall]): + Effective firewalls on the instance. 
+ """ + + firewall_policys: MutableSequence['InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=410985794, + message='InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + ) + firewalls: MutableSequence['Firewall'] = proto.RepeatedField( + proto.MESSAGE, + number=272245619, + message='Firewall', + ) + + +class InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + [Output Only] Deprecated, please use short name instead. The + display name of the firewall policy. + + This field is a member of `oneof`_ ``_display_name``. + name (str): + [Output Only] The name of the firewall policy. + + This field is a member of `oneof`_ ``_name``. + rules (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRule]): + The rules that apply to the network. + short_name (str): + [Output Only] The short name of the firewall policy. + + This field is a member of `oneof`_ ``_short_name``. + type_ (str): + [Output Only] The type of the firewall policy. Can be one of + HIERARCHY, NETWORK, NETWORK_REGIONAL. Check the Type enum + for the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""[Output Only] The type of the firewall policy. Can be one of + HIERARCHY, NETWORK, NETWORK_REGIONAL. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + HIERARCHY (69902869): + No description available. + NETWORK (413984270): + No description available. + NETWORK_REGIONAL (190804272): + No description available. + UNSPECIFIED (526786327): + No description available. 
+ """ + UNDEFINED_TYPE = 0 + HIERARCHY = 69902869 + NETWORK = 413984270 + NETWORK_REGIONAL = 190804272 + UNSPECIFIED = 526786327 + + display_name: str = proto.Field( + proto.STRING, + number=4473832, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + rules: MutableSequence['FirewallPolicyRule'] = proto.RepeatedField( + proto.MESSAGE, + number=108873975, + message='FirewallPolicyRule', + ) + short_name: str = proto.Field( + proto.STRING, + number=492051566, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class InstancesRemoveResourcePoliciesRequest(proto.Message): + r""" + + Attributes: + resource_policies (MutableSequence[str]): + Resource policies to be removed from this + instance. + """ + + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + + +class InstancesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instances (MutableSequence[google.cloud.compute_v1.types.Instance]): + [Output Only] A list of instances contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of instances when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + instances: MutableSequence['Instance'] = proto.RepeatedField( + proto.MESSAGE, + number=29097598, + message='Instance', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InstancesSetLabelsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + label_fingerprint (str): + Fingerprint of the previous set of labels for + this resource, used to prevent conflicts. 
+ Provide the latest fingerprint value when making + a request to add or change labels. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + + """ + + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + + +class InstancesSetMachineResourcesRequest(proto.Message): + r""" + + Attributes: + guest_accelerators (MutableSequence[google.cloud.compute_v1.types.AcceleratorConfig]): + A list of the type and count of accelerator + cards attached to the instance. + """ + + guest_accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=463595119, + message='AcceleratorConfig', + ) + + +class InstancesSetMachineTypeRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + machine_type (str): + Full or partial URL of the machine type + resource. See Machine Types for a full list of + machine types. For example: + zones/us-central1-f/machineTypes/n1-standard-1 + + This field is a member of `oneof`_ ``_machine_type``. + """ + + machine_type: str = proto.Field( + proto.STRING, + number=227711026, + optional=True, + ) + + +class InstancesSetMinCpuPlatformRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_cpu_platform (str): + Minimum cpu/platform this instance should be + started at. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + """ + + min_cpu_platform: str = proto.Field( + proto.STRING, + number=242912759, + optional=True, + ) + + +class InstancesSetNameRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + current_name (str): + The current name of this resource, used to + prevent conflicts. Provide the latest name when + making a request to change name. + + This field is a member of `oneof`_ ``_current_name``. + name (str): + The name to be applied to the instance. Needs + to be RFC 1035 compliant. + + This field is a member of `oneof`_ ``_name``. + """ + + current_name: str = proto.Field( + proto.STRING, + number=394983825, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + +class InstancesSetServiceAccountRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + email (str): + Email address of the service account. + + This field is a member of `oneof`_ ``_email``. + scopes (MutableSequence[str]): + The list of scopes to be made available for + this service account. + """ + + email: str = proto.Field( + proto.STRING, + number=96619420, + optional=True, + ) + scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=165973151, + ) + + +class InstancesStartWithEncryptionKeyRequest(proto.Message): + r""" + + Attributes: + disks (MutableSequence[google.cloud.compute_v1.types.CustomerEncryptionKeyProtectedDisk]): + Array of disks associated with this instance + that are protected with a customer-supplied + encryption key. In order to start the instance, + the disk url and its corresponding key must be + provided. If the disk is not protected with a + customer-supplied encryption key it should not + be specified. 
+ """ + + disks: MutableSequence['CustomerEncryptionKeyProtectedDisk'] = proto.RepeatedField( + proto.MESSAGE, + number=95594102, + message='CustomerEncryptionKeyProtectedDisk', + ) + + +class Int64RangeMatch(proto.Message): + r"""HttpRouteRuleMatch criteria for field values that must stay + within the specified integer range. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + range_end (int): + The end of the range (exclusive) in signed + long integer format. + + This field is a member of `oneof`_ ``_range_end``. + range_start (int): + The start of the range (inclusive) in signed + long integer format. + + This field is a member of `oneof`_ ``_range_start``. + """ + + range_end: int = proto.Field( + proto.INT64, + number=322439897, + optional=True, + ) + range_start: int = proto.Field( + proto.INT64, + number=103333600, + optional=True, + ) + + +class Interconnect(proto.Message): + r"""Represents an Interconnect resource. An Interconnect resource + is a dedicated connection between the Google Cloud network and + your on-premises network. For more information, read the + Dedicated Interconnect Overview. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + admin_enabled (bool): + Administrative status of the interconnect. + When this is set to true, the Interconnect is + functional and can carry traffic. When set to + false, no packets can be carried over the + interconnect and no BGP routes are exchanged + over it. By default, the status is set to true. + + This field is a member of `oneof`_ ``_admin_enabled``. + circuit_infos (MutableSequence[google.cloud.compute_v1.types.InterconnectCircuitInfo]): + [Output Only] A list of CircuitInfo objects, that describe + the individual circuits in this LAG. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. 
+ + This field is a member of `oneof`_ ``_creation_timestamp``. + customer_name (str): + Customer name, to put in the Letter of + Authorization as the party authorized to request + a crossconnect. + + This field is a member of `oneof`_ ``_customer_name``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + expected_outages (MutableSequence[google.cloud.compute_v1.types.InterconnectOutageNotification]): + [Output Only] A list of outages expected for this + Interconnect. + google_ip_address (str): + [Output Only] IP address configured on the Google side of + the Interconnect link. This can be used only for ping tests. + + This field is a member of `oneof`_ ``_google_ip_address``. + google_reference_id (str): + [Output Only] Google reference ID to be used when raising + support tickets with Google or otherwise to debug backend + connectivity issues. + + This field is a member of `oneof`_ ``_google_reference_id``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + interconnect_attachments (MutableSequence[str]): + [Output Only] A list of the URLs of all + InterconnectAttachments configured to use this Interconnect. + interconnect_type (str): + Type of interconnect, which can take one of the following + values: - PARTNER: A partner-managed interconnection shared + between customers though a partner. - DEDICATED: A dedicated + physical interconnection with the customer. Note that a + value IT_PRIVATE has been deprecated in favor of DEDICATED. + Check the InterconnectType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_interconnect_type``. + kind (str): + [Output Only] Type of the resource. Always + compute#interconnect for interconnects. + + This field is a member of `oneof`_ ``_kind``. 
+ label_fingerprint (str): + A fingerprint for the labels being applied to + this Interconnect, which is essentially a hash + of the labels set used for optimistic locking. + The fingerprint is initially generated by + Compute Engine and changes after every request + to modify or update labels. You must always + provide an up-to-date fingerprint hash in order + to update or change labels, otherwise the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve an + Interconnect. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + link_type (str): + Type of link requested, which can take one of the following + values: - LINK_TYPE_ETHERNET_10G_LR: A 10G Ethernet with LR + optics - LINK_TYPE_ETHERNET_100G_LR: A 100G Ethernet with LR + optics. Note that this field indicates the speed of each of + the links in the bundle, not the speed of the entire bundle. + Check the LinkType enum for the list of possible values. + + This field is a member of `oneof`_ ``_link_type``. + location (str): + URL of the InterconnectLocation object that + represents where this connection is to be + provisioned. + + This field is a member of `oneof`_ ``_location``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. 
+ noc_contact_email (str): + Email address to contact the customer NOC for + operations and maintenance notifications + regarding this Interconnect. If specified, this + will be used for notifications in addition to + all other forms described, such as Cloud + Monitoring logs alerting and Cloud + Notifications. This field is required for users + who sign up for Cloud Interconnect using + workforce identity federation. + + This field is a member of `oneof`_ ``_noc_contact_email``. + operational_status (str): + [Output Only] The current status of this Interconnect's + functionality, which can take one of the following values: - + OS_ACTIVE: A valid Interconnect, which is turned up and is + ready to use. Attachments may be provisioned on this + Interconnect. - OS_UNPROVISIONED: An Interconnect that has + not completed turnup. No attachments may be provisioned on + this Interconnect. - OS_UNDER_MAINTENANCE: An Interconnect + that is undergoing internal maintenance. No attachments may + be provisioned or updated on this Interconnect. Check the + OperationalStatus enum for the list of possible values. + + This field is a member of `oneof`_ ``_operational_status``. + peer_ip_address (str): + [Output Only] IP address configured on the customer side of + the Interconnect link. The customer should configure this IP + address during turnup when prompted by Google NOC. This can + be used only for ping tests. + + This field is a member of `oneof`_ ``_peer_ip_address``. + provisioned_link_count (int): + [Output Only] Number of links actually provisioned in this + interconnect. + + This field is a member of `oneof`_ ``_provisioned_link_count``. + remote_location (str): + Indicates that this is a Cross-Cloud + Interconnect. This field specifies the location + outside of Google's network that the + interconnect is connected to. + + This field is a member of `oneof`_ ``_remote_location``. 
+ requested_link_count (int): + Target number of physical links in the link + bundle, as requested by the customer. + + This field is a member of `oneof`_ ``_requested_link_count``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + state (str): + [Output Only] The current state of Interconnect + functionality, which can take one of the following values: - + ACTIVE: The Interconnect is valid, turned up and ready to + use. Attachments may be provisioned on this Interconnect. - + UNPROVISIONED: The Interconnect has not completed turnup. No + attachments may be provisioned on this Interconnect. - + UNDER_MAINTENANCE: The Interconnect is undergoing internal + maintenance. No attachments may be provisioned or updated on + this Interconnect. Check the State enum for the list of + possible values. + + This field is a member of `oneof`_ ``_state``. + """ + class InterconnectType(proto.Enum): + r"""Type of interconnect, which can take one of the following values: - + PARTNER: A partner-managed interconnection shared between customers + though a partner. - DEDICATED: A dedicated physical interconnection + with the customer. Note that a value IT_PRIVATE has been deprecated + in favor of DEDICATED. + + Values: + UNDEFINED_INTERCONNECT_TYPE (0): + A value indicating that the enum field is not + set. + DEDICATED (258411983): + A dedicated physical interconnection with the + customer. + IT_PRIVATE (335677007): + [Deprecated] A private, physical interconnection with the + customer. + PARTNER (461924520): + A partner-managed interconnection shared + between customers via partner. 
+ """ + UNDEFINED_INTERCONNECT_TYPE = 0 + DEDICATED = 258411983 + IT_PRIVATE = 335677007 + PARTNER = 461924520 + + class LinkType(proto.Enum): + r"""Type of link requested, which can take one of the following values: + - LINK_TYPE_ETHERNET_10G_LR: A 10G Ethernet with LR optics - + LINK_TYPE_ETHERNET_100G_LR: A 100G Ethernet with LR optics. Note + that this field indicates the speed of each of the links in the + bundle, not the speed of the entire bundle. + + Values: + UNDEFINED_LINK_TYPE (0): + A value indicating that the enum field is not + set. + LINK_TYPE_ETHERNET_100G_LR (337672551): + 100G Ethernet, LR Optics. + LINK_TYPE_ETHERNET_10G_LR (236739749): + 10G Ethernet, LR Optics. [(rate_bps) = 10000000000]; + """ + UNDEFINED_LINK_TYPE = 0 + LINK_TYPE_ETHERNET_100G_LR = 337672551 + LINK_TYPE_ETHERNET_10G_LR = 236739749 + + class OperationalStatus(proto.Enum): + r"""[Output Only] The current status of this Interconnect's + functionality, which can take one of the following values: - + OS_ACTIVE: A valid Interconnect, which is turned up and is ready to + use. Attachments may be provisioned on this Interconnect. - + OS_UNPROVISIONED: An Interconnect that has not completed turnup. No + attachments may be provisioned on this Interconnect. - + OS_UNDER_MAINTENANCE: An Interconnect that is undergoing internal + maintenance. No attachments may be provisioned or updated on this + Interconnect. + + Values: + UNDEFINED_OPERATIONAL_STATUS (0): + A value indicating that the enum field is not + set. + OS_ACTIVE (55721409): + The interconnect is valid, turned up, and + ready to use. Attachments may be provisioned on + this interconnect. + OS_UNPROVISIONED (239771840): + The interconnect has not completed turnup. No + attachments may be provisioned on this + interconnect. 
+ """ + UNDEFINED_OPERATIONAL_STATUS = 0 + OS_ACTIVE = 55721409 + OS_UNPROVISIONED = 239771840 + + class State(proto.Enum): + r"""[Output Only] The current state of Interconnect functionality, which + can take one of the following values: - ACTIVE: The Interconnect is + valid, turned up and ready to use. Attachments may be provisioned on + this Interconnect. - UNPROVISIONED: The Interconnect has not + completed turnup. No attachments may be provisioned on this + Interconnect. - UNDER_MAINTENANCE: The Interconnect is undergoing + internal maintenance. No attachments may be provisioned or updated + on this Interconnect. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + The interconnect is valid, turned up, and + ready to use. Attachments may be provisioned on + this interconnect. + UNPROVISIONED (517333979): + The interconnect has not completed turnup. No + attachments may be provisioned on this + interconnect. + """ + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + UNPROVISIONED = 517333979 + + admin_enabled: bool = proto.Field( + proto.BOOL, + number=445675089, + optional=True, + ) + circuit_infos: MutableSequence['InterconnectCircuitInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=164839855, + message='InterconnectCircuitInfo', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + customer_name: str = proto.Field( + proto.STRING, + number=3665484, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + expected_outages: MutableSequence['InterconnectOutageNotification'] = proto.RepeatedField( + proto.MESSAGE, + number=264484123, + message='InterconnectOutageNotification', + ) + google_ip_address: str = proto.Field( + proto.STRING, + number=443105954, + optional=True, + ) + google_reference_id: str = proto.Field( + proto.STRING, + number=534944469, + optional=True, + ) + id: int = proto.Field( + 
proto.UINT64, + number=3355, + optional=True, + ) + interconnect_attachments: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=425388415, + ) + interconnect_type: str = proto.Field( + proto.STRING, + number=515165259, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + link_type: str = proto.Field( + proto.STRING, + number=523207775, + optional=True, + ) + location: str = proto.Field( + proto.STRING, + number=290430901, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + noc_contact_email: str = proto.Field( + proto.STRING, + number=14072832, + optional=True, + ) + operational_status: str = proto.Field( + proto.STRING, + number=201070847, + optional=True, + ) + peer_ip_address: str = proto.Field( + proto.STRING, + number=207735769, + optional=True, + ) + provisioned_link_count: int = proto.Field( + proto.INT32, + number=410888565, + optional=True, + ) + remote_location: str = proto.Field( + proto.STRING, + number=324388750, + optional=True, + ) + requested_link_count: int = proto.Field( + proto.INT32, + number=45051387, + optional=True, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class InterconnectAttachment(proto.Message): + r"""Represents an Interconnect Attachment (VLAN) resource. You + can use Interconnect attachments (VLANS) to connect your Virtual + Private Cloud networks to your on-premises networks through an + Interconnect. For more information, read Creating VLAN + Attachments. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + admin_enabled (bool): + Determines whether this Attachment will carry packets. Not + present for PARTNER_PROVIDER. + + This field is a member of `oneof`_ ``_admin_enabled``. + bandwidth (str): + Provisioned bandwidth capacity for the interconnect + attachment. For attachments of type DEDICATED, the user can + set the bandwidth. For attachments of type PARTNER, the + Google Partner that is operating the interconnect must set + the bandwidth. Output only for PARTNER type, mutable for + PARTNER_PROVIDER and DEDICATED, and can take one of the + following values: - BPS_50M: 50 Mbit/s - BPS_100M: 100 + Mbit/s - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s - + BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 + Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G: 5 Gbit/s - BPS_10G: 10 + Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s Check the + Bandwidth enum for the list of possible values. + + This field is a member of `oneof`_ ``_bandwidth``. + candidate_ipv6_subnets (MutableSequence[str]): + This field is not available. + candidate_subnets (MutableSequence[str]): + Up to 16 candidate prefixes that can be used + to restrict the allocation of + cloudRouterIpAddress and customerRouterIpAddress + for this attachment. All prefixes must be within + link-local address space (169.254.0.0/16) and + must be /29 or shorter (/28, /27, etc). Google + will attempt to select an unused /29 from the + supplied candidate prefix(es). The request will + fail if all possible /29s are in use on Google's + edge. If not supplied, Google will randomly + select an unused /29 from all of link-local + space. + cloud_router_ip_address (str): + [Output Only] IPv4 address + prefix length to be configured + on Cloud Router Interface for this interconnect attachment. + + This field is a member of `oneof`_ ``_cloud_router_ip_address``. 
+ cloud_router_ipv6_address (str): + [Output Only] IPv6 address + prefix length to be configured + on Cloud Router Interface for this interconnect attachment. + + This field is a member of `oneof`_ ``_cloud_router_ipv6_address``. + cloud_router_ipv6_interface_id (str): + This field is not available. + + This field is a member of `oneof`_ ``_cloud_router_ipv6_interface_id``. + configuration_constraints (google.cloud.compute_v1.types.InterconnectAttachmentConfigurationConstraints): + [Output Only] Constraints for this attachment, if any. The + attachment does not work if these constraints are not met. + + This field is a member of `oneof`_ ``_configuration_constraints``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + customer_router_ip_address (str): + [Output Only] IPv4 address + prefix length to be configured + on the customer router subinterface for this interconnect + attachment. + + This field is a member of `oneof`_ ``_customer_router_ip_address``. + customer_router_ipv6_address (str): + [Output Only] IPv6 address + prefix length to be configured + on the customer router subinterface for this interconnect + attachment. + + This field is a member of `oneof`_ ``_customer_router_ipv6_address``. + customer_router_ipv6_interface_id (str): + This field is not available. + + This field is a member of `oneof`_ ``_customer_router_ipv6_interface_id``. + dataplane_version (int): + [Output Only] Dataplane version for this + InterconnectAttachment. This field is only present for + Dataplane version 2 and higher. Absence of this field in the + API output indicates that the Dataplane is version 1. + + This field is a member of `oneof`_ ``_dataplane_version``. + description (str): + An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. + edge_availability_domain (str): + Desired availability domain for the attachment. 
Only + available for type PARTNER, at creation time, and can take + one of the following values: - AVAILABILITY_DOMAIN_ANY - + AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved + reliability, customers should configure a pair of + attachments, one per availability domain. The selected + availability domain will be provided to the Partner via the + pairing key, so that the provisioned circuit will lie in the + specified domain. If not specified, the value will default + to AVAILABILITY_DOMAIN_ANY. Check the EdgeAvailabilityDomain + enum for the list of possible values. + + This field is a member of `oneof`_ ``_edge_availability_domain``. + encryption (str): + Indicates the user-supplied encryption option of this VLAN + attachment (interconnectAttachment). Can only be specified + at attachment creation for PARTNER or DEDICATED attachments. + Possible values are: - NONE - This is the default value, + which means that the VLAN attachment carries unencrypted + traffic. VMs are able to send traffic to, or receive traffic + from, such a VLAN attachment. - IPSEC - The VLAN attachment + carries only encrypted traffic that is encrypted by an IPsec + device, such as an HA VPN gateway or third-party IPsec VPN. + VMs cannot directly send traffic to, or receive traffic + from, such a VLAN attachment. To use *HA VPN over Cloud + Interconnect*, the VLAN attachment must be created with this + option. Check the Encryption enum for the list of possible + values. + + This field is a member of `oneof`_ ``_encryption``. + google_reference_id (str): + [Output Only] Google reference ID, to be used when raising + support tickets with Google or otherwise to debug backend + connectivity issues. [Deprecated] This field is not used. + + This field is a member of `oneof`_ ``_google_reference_id``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. 
+ interconnect (str): + URL of the underlying Interconnect object + that this attachment's traffic will traverse + through. + + This field is a member of `oneof`_ ``_interconnect``. + ipsec_internal_addresses (MutableSequence[str]): + A list of URLs of addresses that have been + reserved for the VLAN attachment. Used only for + the VLAN attachment that has the encryption + option as IPSEC. The addresses must be regional + internal IP address ranges. When creating an HA + VPN gateway over the VLAN attachment, if the + attachment is configured to use a regional + internal IP address, then the VPN gateway's IP + address is allocated from the IP address range + specified here. For example, if the HA VPN + gateway's interface 0 is paired to this VLAN + attachment, then a regional internal IP address + for the VPN gateway interface 0 will be + allocated from the IP address specified for this + VLAN attachment. If this field is not specified + when creating the VLAN attachment, then later on + when creating an HA VPN gateway on this VLAN + attachment, the HA VPN gateway's IP address is + allocated from the regional external IP address + pool. + kind (str): + [Output Only] Type of the resource. Always + compute#interconnectAttachment for interconnect attachments. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this InterconnectAttachment, which is + essentially a hash of the labels set used for + optimistic locking. The fingerprint is initially + generated by Compute Engine and changes after + every request to modify or update labels. You + must always provide an up-to-date fingerprint + hash in order to update or change labels, + otherwise the request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve an + InterconnectAttachment. + + This field is a member of `oneof`_ ``_label_fingerprint``. 
+ labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + mtu (int): + Maximum Transmission Unit (MTU), in bytes, of + packets passing through this interconnect + attachment. Only 1440 and 1500 are allowed. If + not specified, the value will default to 1440. + + This field is a member of `oneof`_ ``_mtu``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + operational_status (str): + [Output Only] The current status of whether or not this + interconnect attachment is functional, which can take one of + the following values: - OS_ACTIVE: The attachment has been + turned up and is ready to use. - OS_UNPROVISIONED: The + attachment is not ready to use yet, because turnup is not + complete. Check the OperationalStatus enum for the list of + possible values. + + This field is a member of `oneof`_ ``_operational_status``. + pairing_key (str): + [Output only for type PARTNER. Input only for + PARTNER_PROVIDER. Not present for DEDICATED]. The opaque + identifier of an PARTNER attachment used to initiate + provisioning with a selected partner. Of the form + "XXXXX/region/domain". + + This field is a member of `oneof`_ ``_pairing_key``. + partner_asn (int): + Optional BGP ASN for the router supplied by a Layer 3 + Partner if they configured BGP on behalf of the customer. 
+ Output only for PARTNER type, input only for + PARTNER_PROVIDER, not available for DEDICATED. + + This field is a member of `oneof`_ ``_partner_asn``. + partner_metadata (google.cloud.compute_v1.types.InterconnectAttachmentPartnerMetadata): + Informational metadata about Partner attachments from + Partners to display to customers. Output only for for + PARTNER type, mutable for PARTNER_PROVIDER, not available + for DEDICATED. + + This field is a member of `oneof`_ ``_partner_metadata``. + private_interconnect_info (google.cloud.compute_v1.types.InterconnectAttachmentPrivateInfo): + [Output Only] Information specific to an + InterconnectAttachment. This property is populated if the + interconnect that this is attached to is of type DEDICATED. + + This field is a member of `oneof`_ ``_private_interconnect_info``. + region (str): + [Output Only] URL of the region where the regional + interconnect attachment resides. You must specify this field + as part of the HTTP request URL. It is not settable as a + field in the request body. + + This field is a member of `oneof`_ ``_region``. + remote_service (str): + [Output Only] If the attachment is on a Cross-Cloud + Interconnect connection, this field contains the + interconnect's remote location service provider. Example + values: "Amazon Web Services" "Microsoft Azure". The field + is set only for attachments on Cross-Cloud Interconnect + connections. Its value is copied from the + InterconnectRemoteLocation remoteService field. + + This field is a member of `oneof`_ ``_remote_service``. + router (str): + URL of the Cloud Router to be used for + dynamic routing. This router must be in the same + region as this InterconnectAttachment. The + InterconnectAttachment will automatically + connect the Interconnect to the network & region + within which the Cloud Router is configured. + + This field is a member of `oneof`_ ``_router``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. 
+ + This field is a member of `oneof`_ ``_satisfies_pzs``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + stack_type (str): + The stack type for this interconnect attachment to identify + whether the IPv6 feature is enabled or not. If not + specified, IPV4_ONLY will be used. This field can be both + set at interconnect attachments creation and update + interconnect attachment operations. Check the StackType enum + for the list of possible values. + + This field is a member of `oneof`_ ``_stack_type``. + state (str): + [Output Only] The current state of this attachment's + functionality. Enum values ACTIVE and UNPROVISIONED are + shared by DEDICATED/PRIVATE, PARTNER, and PARTNER_PROVIDER + interconnect attachments, while enum values PENDING_PARTNER, + PARTNER_REQUEST_RECEIVED, and PENDING_CUSTOMER are used for + only PARTNER and PARTNER_PROVIDER interconnect attachments. + This state can take one of the following values: - ACTIVE: + The attachment has been turned up and is ready to use. - + UNPROVISIONED: The attachment is not ready to use yet, + because turnup is not complete. - PENDING_PARTNER: A + newly-created PARTNER attachment that has not yet been + configured on the Partner side. - PARTNER_REQUEST_RECEIVED: + A PARTNER attachment is in the process of provisioning after + a PARTNER_PROVIDER attachment was created that references + it. - PENDING_CUSTOMER: A PARTNER or PARTNER_PROVIDER + attachment that is waiting for a customer to activate it. - + DEFUNCT: The attachment was deleted externally and is no + longer functional. This could be because the associated + Interconnect was removed, or because the other side of a + Partner attachment was deleted. Check the State enum for the + list of possible values. + + This field is a member of `oneof`_ ``_state``. + subnet_length (int): + Length of the IPv4 subnet mask. 
Allowed + values: - 29 (default) - 30 The default value is + 29, except for Cross-Cloud Interconnect + connections that use an + InterconnectRemoteLocation with a + constraints.subnetLengthRange.min equal to 30. + For example, connections that use an Azure + remote location fall into this category. In + these cases, the default value is 30, and + requesting 29 returns an error. Where both 29 + and 30 are allowed, 29 is preferred, because it + gives Google Cloud Support more debugging + visibility. + + This field is a member of `oneof`_ ``_subnet_length``. + type_ (str): + The type of interconnect attachment this is, which can take + one of the following values: - DEDICATED: an attachment to a + Dedicated Interconnect. - PARTNER: an attachment to a + Partner Interconnect, created by the customer. - + PARTNER_PROVIDER: an attachment to a Partner Interconnect, + created by the partner. Check the Type enum for the list of + possible values. + + This field is a member of `oneof`_ ``_type``. + vlan_tag8021q (int): + The IEEE 802.1Q VLAN tag for this attachment, + in the range 2-4093. Only specified at creation + time. + + This field is a member of `oneof`_ ``_vlan_tag8021q``. + """ + class Bandwidth(proto.Enum): + r"""Provisioned bandwidth capacity for the interconnect attachment. For + attachments of type DEDICATED, the user can set the bandwidth. For + attachments of type PARTNER, the Google Partner that is operating + the interconnect must set the bandwidth. Output only for PARTNER + type, mutable for PARTNER_PROVIDER and DEDICATED, and can take one + of the following values: - BPS_50M: 50 Mbit/s - BPS_100M: 100 Mbit/s + - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s - BPS_400M: 400 Mbit/s + - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - + BPS_5G: 5 Gbit/s - BPS_10G: 10 Gbit/s - BPS_20G: 20 Gbit/s - + BPS_50G: 50 Gbit/s + + Values: + UNDEFINED_BANDWIDTH (0): + A value indicating that the enum field is not + set. 
+ BPS_100M (49547958): + 100 Mbit/s + BPS_10G (278693006): + 10 Gbit/s + BPS_1G (355358448): + 1 Gbit/s + BPS_200M (49577749): + 200 Mbit/s + BPS_20G (278693967): + 20 Gbit/s + BPS_2G (355358479): + 2 Gbit/s + BPS_300M (49607540): + 300 Mbit/s + BPS_400M (49637331): + 400 Mbit/s + BPS_500M (49667122): + 500 Mbit/s + BPS_50G (278696850): + 50 Gbit/s + BPS_50M (278696856): + 50 Mbit/s + BPS_5G (355358572): + 5 Gbit/s + """ + UNDEFINED_BANDWIDTH = 0 + BPS_100M = 49547958 + BPS_10G = 278693006 + BPS_1G = 355358448 + BPS_200M = 49577749 + BPS_20G = 278693967 + BPS_2G = 355358479 + BPS_300M = 49607540 + BPS_400M = 49637331 + BPS_500M = 49667122 + BPS_50G = 278696850 + BPS_50M = 278696856 + BPS_5G = 355358572 + + class EdgeAvailabilityDomain(proto.Enum): + r"""Desired availability domain for the attachment. Only available for + type PARTNER, at creation time, and can take one of the following + values: - AVAILABILITY_DOMAIN_ANY - AVAILABILITY_DOMAIN_1 - + AVAILABILITY_DOMAIN_2 For improved reliability, customers should + configure a pair of attachments, one per availability domain. The + selected availability domain will be provided to the Partner via the + pairing key, so that the provisioned circuit will lie in the + specified domain. If not specified, the value will default to + AVAILABILITY_DOMAIN_ANY. + + Values: + UNDEFINED_EDGE_AVAILABILITY_DOMAIN (0): + A value indicating that the enum field is not + set. + AVAILABILITY_DOMAIN_1 (349552090): + No description available. + AVAILABILITY_DOMAIN_2 (349552091): + No description available. + AVAILABILITY_DOMAIN_ANY (375256373): + No description available. + """ + UNDEFINED_EDGE_AVAILABILITY_DOMAIN = 0 + AVAILABILITY_DOMAIN_1 = 349552090 + AVAILABILITY_DOMAIN_2 = 349552091 + AVAILABILITY_DOMAIN_ANY = 375256373 + + class Encryption(proto.Enum): + r"""Indicates the user-supplied encryption option of this VLAN + attachment (interconnectAttachment). 
Can only be specified at + attachment creation for PARTNER or DEDICATED attachments. Possible + values are: - NONE - This is the default value, which means that the + VLAN attachment carries unencrypted traffic. VMs are able to send + traffic to, or receive traffic from, such a VLAN attachment. - IPSEC + - The VLAN attachment carries only encrypted traffic that is + encrypted by an IPsec device, such as an HA VPN gateway or + third-party IPsec VPN. VMs cannot directly send traffic to, or + receive traffic from, such a VLAN attachment. To use *HA VPN over + Cloud Interconnect*, the VLAN attachment must be created with this + option. + + Values: + UNDEFINED_ENCRYPTION (0): + A value indicating that the enum field is not + set. + IPSEC (69882282): + The interconnect attachment will carry only + encrypted traffic that is encrypted by an IPsec + device such as HA VPN gateway; VMs cannot + directly send traffic to or receive traffic from + such an interconnect attachment. To use HA VPN + over Cloud Interconnect, the interconnect + attachment must be created with this option. + NONE (2402104): + This is the default value, which means the + Interconnect Attachment will carry unencrypted + traffic. VMs will be able to send traffic to or + receive traffic from such interconnect + attachment. + """ + UNDEFINED_ENCRYPTION = 0 + IPSEC = 69882282 + NONE = 2402104 + + class OperationalStatus(proto.Enum): + r"""[Output Only] The current status of whether or not this interconnect + attachment is functional, which can take one of the following + values: - OS_ACTIVE: The attachment has been turned up and is ready + to use. - OS_UNPROVISIONED: The attachment is not ready to use yet, + because turnup is not complete. + + Values: + UNDEFINED_OPERATIONAL_STATUS (0): + A value indicating that the enum field is not + set. + OS_ACTIVE (55721409): + Indicates that attachment has been turned up + and is ready to use. 
+ OS_UNPROVISIONED (239771840): + Indicates that attachment is not ready to use + yet, because turnup is not complete. + """ + UNDEFINED_OPERATIONAL_STATUS = 0 + OS_ACTIVE = 55721409 + OS_UNPROVISIONED = 239771840 + + class StackType(proto.Enum): + r"""The stack type for this interconnect attachment to identify whether + the IPv6 feature is enabled or not. If not specified, IPV4_ONLY will + be used. This field can be both set at interconnect attachments + creation and update interconnect attachment operations. + + Values: + UNDEFINED_STACK_TYPE (0): + A value indicating that the enum field is not + set. + IPV4_IPV6 (22197249): + The interconnect attachment can have both + IPv4 and IPv6 addresses. + IPV4_ONLY (22373798): + The interconnect attachment will only be + assigned IPv4 addresses. + """ + UNDEFINED_STACK_TYPE = 0 + IPV4_IPV6 = 22197249 + IPV4_ONLY = 22373798 + + class State(proto.Enum): + r"""[Output Only] The current state of this attachment's functionality. + Enum values ACTIVE and UNPROVISIONED are shared by + DEDICATED/PRIVATE, PARTNER, and PARTNER_PROVIDER interconnect + attachments, while enum values PENDING_PARTNER, + PARTNER_REQUEST_RECEIVED, and PENDING_CUSTOMER are used for only + PARTNER and PARTNER_PROVIDER interconnect attachments. This state + can take one of the following values: - ACTIVE: The attachment has + been turned up and is ready to use. - UNPROVISIONED: The attachment + is not ready to use yet, because turnup is not complete. - + PENDING_PARTNER: A newly-created PARTNER attachment that has not yet + been configured on the Partner side. - PARTNER_REQUEST_RECEIVED: A + PARTNER attachment is in the process of provisioning after a + PARTNER_PROVIDER attachment was created that references it. - + PENDING_CUSTOMER: A PARTNER or PARTNER_PROVIDER attachment that is + waiting for a customer to activate it. - DEFUNCT: The attachment was + deleted externally and is no longer functional. 
This could be + because the associated Interconnect was removed, or because the + other side of a Partner attachment was deleted. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + Indicates that attachment has been turned up + and is ready to use. + DEFUNCT (115891759): + The attachment was deleted externally and is + no longer functional. This could be because the + associated Interconnect was wiped out, or + because the other side of a Partner attachment + was deleted. + PARTNER_REQUEST_RECEIVED (513587304): + A PARTNER attachment is in the process of provisioning after + a PARTNER_PROVIDER attachment was created that references + it. + PENDING_CUSTOMER (167494054): + PARTNER or PARTNER_PROVIDER attachment that is waiting for + the customer to activate. + PENDING_PARTNER (387890656): + A newly created PARTNER attachment that has + not yet been configured on the Partner side. + STATE_UNSPECIFIED (470755401): + No description available. + UNPROVISIONED (517333979): + Indicates that attachment is not ready to use + yet, because turnup is not complete. + """ + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + DEFUNCT = 115891759 + PARTNER_REQUEST_RECEIVED = 513587304 + PENDING_CUSTOMER = 167494054 + PENDING_PARTNER = 387890656 + STATE_UNSPECIFIED = 470755401 + UNPROVISIONED = 517333979 + + class Type(proto.Enum): + r"""The type of interconnect attachment this is, which can take one of + the following values: - DEDICATED: an attachment to a Dedicated + Interconnect. - PARTNER: an attachment to a Partner Interconnect, + created by the customer. - PARTNER_PROVIDER: an attachment to a + Partner Interconnect, created by the partner. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + DEDICATED (258411983): + Attachment to a dedicated interconnect. + PARTNER (461924520): + Attachment to a partner interconnect, created + by the customer. 
+ PARTNER_PROVIDER (483261352): + Attachment to a partner interconnect, created + by the partner. + """ + UNDEFINED_TYPE = 0 + DEDICATED = 258411983 + PARTNER = 461924520 + PARTNER_PROVIDER = 483261352 + + admin_enabled: bool = proto.Field( + proto.BOOL, + number=445675089, + optional=True, + ) + bandwidth: str = proto.Field( + proto.STRING, + number=181715121, + optional=True, + ) + candidate_ipv6_subnets: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=70682522, + ) + candidate_subnets: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=237842938, + ) + cloud_router_ip_address: str = proto.Field( + proto.STRING, + number=287392776, + optional=True, + ) + cloud_router_ipv6_address: str = proto.Field( + proto.STRING, + number=451922376, + optional=True, + ) + cloud_router_ipv6_interface_id: str = proto.Field( + proto.STRING, + number=521282701, + optional=True, + ) + configuration_constraints: 'InterconnectAttachmentConfigurationConstraints' = proto.Field( + proto.MESSAGE, + number=179681389, + optional=True, + message='InterconnectAttachmentConfigurationConstraints', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + customer_router_ip_address: str = proto.Field( + proto.STRING, + number=332475761, + optional=True, + ) + customer_router_ipv6_address: str = proto.Field( + proto.STRING, + number=290127089, + optional=True, + ) + customer_router_ipv6_interface_id: str = proto.Field( + proto.STRING, + number=380994308, + optional=True, + ) + dataplane_version: int = proto.Field( + proto.INT32, + number=34920075, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + edge_availability_domain: str = proto.Field( + proto.STRING, + number=71289510, + optional=True, + ) + encryption: str = proto.Field( + proto.STRING, + number=97980291, + optional=True, + ) + google_reference_id: str = proto.Field( + proto.STRING, + 
number=534944469, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + interconnect: str = proto.Field( + proto.STRING, + number=224601230, + optional=True, + ) + ipsec_internal_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=407648565, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + mtu: int = proto.Field( + proto.INT32, + number=108462, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + operational_status: str = proto.Field( + proto.STRING, + number=201070847, + optional=True, + ) + pairing_key: str = proto.Field( + proto.STRING, + number=439695464, + optional=True, + ) + partner_asn: int = proto.Field( + proto.INT64, + number=438166149, + optional=True, + ) + partner_metadata: 'InterconnectAttachmentPartnerMetadata' = proto.Field( + proto.MESSAGE, + number=65908934, + optional=True, + message='InterconnectAttachmentPartnerMetadata', + ) + private_interconnect_info: 'InterconnectAttachmentPrivateInfo' = proto.Field( + proto.MESSAGE, + number=237270531, + optional=True, + message='InterconnectAttachmentPrivateInfo', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + remote_service: str = proto.Field( + proto.STRING, + number=391954364, + optional=True, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + optional=True, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + stack_type: str = proto.Field( + proto.STRING, + number=425908881, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + 
number=109757585, + optional=True, + ) + subnet_length: int = proto.Field( + proto.INT32, + number=279831048, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + vlan_tag8021q: int = proto.Field( + proto.INT32, + number=119927836, + optional=True, + ) + + +class InterconnectAttachmentAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.InterconnectAttachmentsScopedList]): + A list of InterconnectAttachmentsScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#interconnectAttachmentAggregatedList for aggregated + lists of interconnect attachments. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'InterconnectAttachmentsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='InterconnectAttachmentsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InterconnectAttachmentConfigurationConstraints(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bgp_md5 (str): + [Output Only] Whether the attachment's BGP session + requires/allows/disallows BGP MD5 authentication. This can + take one of the following values: MD5_OPTIONAL, + MD5_REQUIRED, MD5_UNSUPPORTED. For example, a Cross-Cloud + Interconnect connection to a remote cloud provider that + requires BGP MD5 authentication has the + interconnectRemoteLocation + attachment_configuration_constraints.bgp_md5 field set to + MD5_REQUIRED, and that property is propagated to the + attachment. Similarly, if BGP MD5 is MD5_UNSUPPORTED, an + error is returned if MD5 is requested. Check the BgpMd5 enum + for the list of possible values. + + This field is a member of `oneof`_ ``_bgp_md5``. + bgp_peer_asn_ranges (MutableSequence[google.cloud.compute_v1.types.InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange]): + [Output Only] List of ASN ranges that the remote location is + known to support. Formatted as an array of inclusive ranges + {min: min-value, max: max-value}. 
For example, [{min: 123, + max: 123}, {min: 64512, max: 65534}] allows the peer ASN to + be 123 or anything in the range 64512-65534. This field is + only advisory. Although the API accepts other ranges, these + are the ranges that we recommend. + """ + class BgpMd5(proto.Enum): + r"""[Output Only] Whether the attachment's BGP session + requires/allows/disallows BGP MD5 authentication. This can take one + of the following values: MD5_OPTIONAL, MD5_REQUIRED, + MD5_UNSUPPORTED. For example, a Cross-Cloud Interconnect connection + to a remote cloud provider that requires BGP MD5 authentication has + the interconnectRemoteLocation + attachment_configuration_constraints.bgp_md5 field set to + MD5_REQUIRED, and that property is propagated to the attachment. + Similarly, if BGP MD5 is MD5_UNSUPPORTED, an error is returned if + MD5 is requested. + + Values: + UNDEFINED_BGP_MD5 (0): + A value indicating that the enum field is not + set. + MD5_OPTIONAL (532156673): + MD5_OPTIONAL: BGP MD5 authentication is supported and can + optionally be configured. + MD5_REQUIRED (218034496): + MD5_REQUIRED: BGP MD5 authentication must be configured. + MD5_UNSUPPORTED (86962388): + MD5_UNSUPPORTED: BGP MD5 authentication must not be + configured + """ + UNDEFINED_BGP_MD5 = 0 + MD5_OPTIONAL = 532156673 + MD5_REQUIRED = 218034496 + MD5_UNSUPPORTED = 86962388 + + bgp_md5: str = proto.Field( + proto.STRING, + number=373093386, + optional=True, + ) + bgp_peer_asn_ranges: MutableSequence['InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange'] = proto.RepeatedField( + proto.MESSAGE, + number=475946370, + message='InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange', + ) + + +class InterconnectAttachmentConfigurationConstraintsBgpPeerASNRange(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_ (int): + + This field is a member of `oneof`_ ``_max``. 
+ min_ (int): + + This field is a member of `oneof`_ ``_min``. + """ + + max_: int = proto.Field( + proto.UINT32, + number=107876, + optional=True, + ) + min_: int = proto.Field( + proto.UINT32, + number=108114, + optional=True, + ) + + +class InterconnectAttachmentList(proto.Message): + r"""Response to the list request, and contains a list of + interconnect attachments. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InterconnectAttachment]): + A list of InterconnectAttachment resources. + kind (str): + [Output Only] Type of resource. Always + compute#interconnectAttachmentList for lists of interconnect + attachments. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InterconnectAttachment'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InterconnectAttachment', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InterconnectAttachmentPartnerMetadata(proto.Message): + r"""Informational metadata about Partner attachments from Partners to + display to customers. These fields are propagated from + PARTNER_PROVIDER attachments to their corresponding PARTNER + attachments. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_name (str): + Plain text name of the Interconnect this + attachment is connected to, as displayed in the + Partner's portal. For instance "Chicago 1". This + value may be validated to match approved Partner + values. + + This field is a member of `oneof`_ ``_interconnect_name``. + partner_name (str): + Plain text name of the Partner providing this + attachment. This value may be validated to match + approved Partner values. + + This field is a member of `oneof`_ ``_partner_name``. + portal_url (str): + URL of the Partner's portal for this + Attachment. Partners may customise this to be a + deep link to the specific resource on the + Partner portal. This value may be validated to + match approved Partner values. + + This field is a member of `oneof`_ ``_portal_url``. 
+ """ + + interconnect_name: str = proto.Field( + proto.STRING, + number=514963356, + optional=True, + ) + partner_name: str = proto.Field( + proto.STRING, + number=161747874, + optional=True, + ) + portal_url: str = proto.Field( + proto.STRING, + number=269182748, + optional=True, + ) + + +class InterconnectAttachmentPrivateInfo(proto.Message): + r"""Information for an interconnect attachment when this belongs + to an interconnect of type DEDICATED. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tag8021q (int): + [Output Only] 802.1q encapsulation tag to be used for + traffic between Google and the customer, going to and from + this network and region. + + This field is a member of `oneof`_ ``_tag8021q``. + """ + + tag8021q: int = proto.Field( + proto.UINT32, + number=271820992, + optional=True, + ) + + +class InterconnectAttachmentsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_attachments (MutableSequence[google.cloud.compute_v1.types.InterconnectAttachment]): + A list of interconnect attachments contained + in this scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + interconnect_attachments: MutableSequence['InterconnectAttachment'] = proto.RepeatedField( + proto.MESSAGE, + number=425388415, + message='InterconnectAttachment', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InterconnectCircuitInfo(proto.Message): + r"""Describes a single physical circuit between the Customer and + Google. CircuitInfo objects are created by Google, so all fields + are output only. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + customer_demarc_id (str): + Customer-side demarc ID for this circuit. + + This field is a member of `oneof`_ ``_customer_demarc_id``. + google_circuit_id (str): + Google-assigned unique ID for this circuit. + Assigned at circuit turn-up. + + This field is a member of `oneof`_ ``_google_circuit_id``. + google_demarc_id (str): + Google-side demarc ID for this circuit. + Assigned at circuit turn-up and provided by + Google to the customer in the LOA. + + This field is a member of `oneof`_ ``_google_demarc_id``. + """ + + customer_demarc_id: str = proto.Field( + proto.STRING, + number=28771859, + optional=True, + ) + google_circuit_id: str = proto.Field( + proto.STRING, + number=262014711, + optional=True, + ) + google_demarc_id: str = proto.Field( + proto.STRING, + number=448196270, + optional=True, + ) + + +class InterconnectDiagnostics(proto.Message): + r"""Diagnostics information about the Interconnect connection, + which contains detailed and current technical information about + Google's side of the connection. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + arp_caches (MutableSequence[google.cloud.compute_v1.types.InterconnectDiagnosticsARPEntry]): + A list of InterconnectDiagnostics.ARPEntry + objects, describing individual neighbors + currently seen by the Google router in the ARP + cache for the Interconnect. This will be empty + when the Interconnect is not bundled. + bundle_aggregation_type (str): + The aggregation type of the bundle interface. + Check the BundleAggregationType enum for the + list of possible values. + + This field is a member of `oneof`_ ``_bundle_aggregation_type``. + bundle_operational_status (str): + The operational status of the bundle + interface. Check the BundleOperationalStatus + enum for the list of possible values. 
+ + This field is a member of `oneof`_ ``_bundle_operational_status``. + links (MutableSequence[google.cloud.compute_v1.types.InterconnectDiagnosticsLinkStatus]): + A list of InterconnectDiagnostics.LinkStatus + objects, describing the status for each link on + the Interconnect. + mac_address (str): + The MAC address of the Interconnect's bundle + interface. + + This field is a member of `oneof`_ ``_mac_address``. + """ + class BundleAggregationType(proto.Enum): + r"""The aggregation type of the bundle interface. + + Values: + UNDEFINED_BUNDLE_AGGREGATION_TYPE (0): + A value indicating that the enum field is not + set. + BUNDLE_AGGREGATION_TYPE_LACP (27758925): + LACP is enabled. + BUNDLE_AGGREGATION_TYPE_STATIC (50678873): + LACP is disabled. + """ + UNDEFINED_BUNDLE_AGGREGATION_TYPE = 0 + BUNDLE_AGGREGATION_TYPE_LACP = 27758925 + BUNDLE_AGGREGATION_TYPE_STATIC = 50678873 + + class BundleOperationalStatus(proto.Enum): + r"""The operational status of the bundle interface. + + Values: + UNDEFINED_BUNDLE_OPERATIONAL_STATUS (0): + A value indicating that the enum field is not + set. + BUNDLE_OPERATIONAL_STATUS_DOWN (453842693): + If bundleAggregationType is LACP: LACP is not + established and/or all links in the bundle have + DOWN operational status. If + bundleAggregationType is STATIC: one or more + links in the bundle has DOWN operational status. + BUNDLE_OPERATIONAL_STATUS_UP (161366462): + If bundleAggregationType is LACP: LACP is + established and at least one link in the bundle + has UP operational status. If + bundleAggregationType is STATIC: all links in + the bundle (typically just one) have UP + operational status. 
+ """ + UNDEFINED_BUNDLE_OPERATIONAL_STATUS = 0 + BUNDLE_OPERATIONAL_STATUS_DOWN = 453842693 + BUNDLE_OPERATIONAL_STATUS_UP = 161366462 + + arp_caches: MutableSequence['InterconnectDiagnosticsARPEntry'] = proto.RepeatedField( + proto.MESSAGE, + number=414591761, + message='InterconnectDiagnosticsARPEntry', + ) + bundle_aggregation_type: str = proto.Field( + proto.STRING, + number=434939028, + optional=True, + ) + bundle_operational_status: str = proto.Field( + proto.STRING, + number=106433500, + optional=True, + ) + links: MutableSequence['InterconnectDiagnosticsLinkStatus'] = proto.RepeatedField( + proto.MESSAGE, + number=102977465, + message='InterconnectDiagnosticsLinkStatus', + ) + mac_address: str = proto.Field( + proto.STRING, + number=332540164, + optional=True, + ) + + +class InterconnectDiagnosticsARPEntry(proto.Message): + r"""Describing the ARP neighbor entries seen on this link + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_address (str): + The IP address of this ARP neighbor. + + This field is a member of `oneof`_ ``_ip_address``. + mac_address (str): + The MAC address of this ARP neighbor. + + This field is a member of `oneof`_ ``_mac_address``. + """ + + ip_address: str = proto.Field( + proto.STRING, + number=406272220, + optional=True, + ) + mac_address: str = proto.Field( + proto.STRING, + number=332540164, + optional=True, + ) + + +class InterconnectDiagnosticsLinkLACPStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + google_system_id (str): + System ID of the port on Google's side of the + LACP exchange. + + This field is a member of `oneof`_ ``_google_system_id``. + neighbor_system_id (str): + System ID of the port on the neighbor's side + of the LACP exchange. + + This field is a member of `oneof`_ ``_neighbor_system_id``. 
+ state (str): + The state of a LACP link, which can take one + of the following values: - ACTIVE: The link is + configured and active within the bundle. - + DETACHED: The link is not configured within the + bundle. This means that the rest of the object + should be empty. Check the State enum for the + list of possible values. + + This field is a member of `oneof`_ ``_state``. + """ + class State(proto.Enum): + r"""The state of a LACP link, which can take one of the following + values: - ACTIVE: The link is configured and active within the + bundle. - DETACHED: The link is not configured within the + bundle. This means that the rest of the object should be empty. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + The link is configured and active within the + bundle. + DETACHED (216562546): + The link is not configured within the bundle, + this means the rest of the object should be + empty. + """ + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + DETACHED = 216562546 + + google_system_id: str = proto.Field( + proto.STRING, + number=91210405, + optional=True, + ) + neighbor_system_id: str = proto.Field( + proto.STRING, + number=343821342, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class InterconnectDiagnosticsLinkOpticalPower(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + state (str): + The status of the current value when compared to the warning + and alarm levels for the receiving or transmitting + transceiver. Possible states include: - OK: The value has + not crossed a warning threshold. - LOW_WARNING: The value + has crossed below the low warning threshold. - HIGH_WARNING: + The value has crossed above the high warning threshold. - + LOW_ALARM: The value has crossed below the low alarm + threshold. 
- HIGH_ALARM: The value has crossed above the + high alarm threshold. Check the State enum for the list of + possible values. + + This field is a member of `oneof`_ ``_state``. + value (float): + Value of the current receiving or + transmitting optical power, read in dBm. Take a + known good optical value, give it a 10% margin + and trigger warnings relative to that value. In + general, a -7dBm warning and a -11dBm alarm are + good optical value estimates for most links. + + This field is a member of `oneof`_ ``_value``. + """ + class State(proto.Enum): + r"""The status of the current value when compared to the warning and + alarm levels for the receiving or transmitting transceiver. Possible + states include: - OK: The value has not crossed a warning threshold. + - LOW_WARNING: The value has crossed below the low warning + threshold. - HIGH_WARNING: The value has crossed above the high + warning threshold. - LOW_ALARM: The value has crossed below the low + alarm threshold. - HIGH_ALARM: The value has crossed above the high + alarm threshold. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + HIGH_ALARM (305363284): + The value has crossed above the high alarm + threshold. + HIGH_WARNING (220984799): + The value of the current optical power has + crossed above the high warning threshold. + LOW_ALARM (316659046): + The value of the current optical power has + crossed below the low alarm threshold. + LOW_WARNING (338793841): + The value of the current optical power has + crossed below the low warning threshold. + OK (2524): + The value of the current optical power has + not crossed a warning threshold. 
+ """ + UNDEFINED_STATE = 0 + HIGH_ALARM = 305363284 + HIGH_WARNING = 220984799 + LOW_ALARM = 316659046 + LOW_WARNING = 338793841 + OK = 2524 + + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + value: float = proto.Field( + proto.FLOAT, + number=111972721, + optional=True, + ) + + +class InterconnectDiagnosticsLinkStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + arp_caches (MutableSequence[google.cloud.compute_v1.types.InterconnectDiagnosticsARPEntry]): + A list of InterconnectDiagnostics.ARPEntry + objects, describing the ARP neighbor entries + seen on this link. This will be empty if the + link is bundled + circuit_id (str): + The unique ID for this link assigned during + turn up by Google. + + This field is a member of `oneof`_ ``_circuit_id``. + google_demarc (str): + The Demarc address assigned by Google and + provided in the LoA. + + This field is a member of `oneof`_ ``_google_demarc``. + lacp_status (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkLACPStatus): + + This field is a member of `oneof`_ ``_lacp_status``. + operational_status (str): + The operational status of the link. + Check the OperationalStatus enum for the list of + possible values. + + This field is a member of `oneof`_ ``_operational_status``. + receiving_optical_power (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkOpticalPower): + An InterconnectDiagnostics.LinkOpticalPower + object, describing the current value and status + of the received light level. + + This field is a member of `oneof`_ ``_receiving_optical_power``. + transmitting_optical_power (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkOpticalPower): + An InterconnectDiagnostics.LinkOpticalPower + object, describing the current value and status + of the transmitted light level. + + This field is a member of `oneof`_ ``_transmitting_optical_power``. 
+ """ + class OperationalStatus(proto.Enum): + r"""The operational status of the link. + + Values: + UNDEFINED_OPERATIONAL_STATUS (0): + A value indicating that the enum field is not + set. + LINK_OPERATIONAL_STATUS_DOWN (281653885): + The interface is unable to communicate with + the remote end. + LINK_OPERATIONAL_STATUS_UP (305879862): + The interface has low level communication + with the remote end. + """ + UNDEFINED_OPERATIONAL_STATUS = 0 + LINK_OPERATIONAL_STATUS_DOWN = 281653885 + LINK_OPERATIONAL_STATUS_UP = 305879862 + + arp_caches: MutableSequence['InterconnectDiagnosticsARPEntry'] = proto.RepeatedField( + proto.MESSAGE, + number=414591761, + message='InterconnectDiagnosticsARPEntry', + ) + circuit_id: str = proto.Field( + proto.STRING, + number=225180977, + optional=True, + ) + google_demarc: str = proto.Field( + proto.STRING, + number=51084, + optional=True, + ) + lacp_status: 'InterconnectDiagnosticsLinkLACPStatus' = proto.Field( + proto.MESSAGE, + number=361210415, + optional=True, + message='InterconnectDiagnosticsLinkLACPStatus', + ) + operational_status: str = proto.Field( + proto.STRING, + number=201070847, + optional=True, + ) + receiving_optical_power: 'InterconnectDiagnosticsLinkOpticalPower' = proto.Field( + proto.MESSAGE, + number=244717279, + optional=True, + message='InterconnectDiagnosticsLinkOpticalPower', + ) + transmitting_optical_power: 'InterconnectDiagnosticsLinkOpticalPower' = proto.Field( + proto.MESSAGE, + number=459431197, + optional=True, + message='InterconnectDiagnosticsLinkOpticalPower', + ) + + +class InterconnectList(proto.Message): + r"""Response to the list request, and contains a list of + interconnects. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. 
+ items (MutableSequence[google.cloud.compute_v1.types.Interconnect]): + A list of Interconnect resources. + kind (str): + [Output Only] Type of resource. Always + compute#interconnectList for lists of interconnects. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Interconnect'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Interconnect', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InterconnectLocation(proto.Message): + r"""Represents an Interconnect Attachment (VLAN) Location + resource. You can use this resource to find location details + about an Interconnect attachment (VLAN). For more information + about interconnect attachments, read Creating VLAN Attachments. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address (str): + [Output Only] The postal address of the Point of Presence, + each line in the address is separated by a newline + character. + + This field is a member of `oneof`_ ``_address``. + availability_zone (str): + [Output Only] Availability zone for this + InterconnectLocation. Within a metropolitan area (metro), + maintenance will not be simultaneously scheduled in more + than one availability zone. Example: "zone1" or "zone2". + + This field is a member of `oneof`_ ``_availability_zone``. + city (str): + [Output Only] Metropolitan area designator that indicates + which city an interconnect is located. For example: + "Chicago, IL", "Amsterdam, Netherlands". + + This field is a member of `oneof`_ ``_city``. + continent (str): + [Output Only] Continent for this location, which can take + one of the following values: - AFRICA - ASIA_PAC - EUROPE - + NORTH_AMERICA - SOUTH_AMERICA Check the Continent enum for + the list of possible values. + + This field is a member of `oneof`_ ``_continent``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + [Output Only] An optional description of the resource. + + This field is a member of `oneof`_ ``_description``. + facility_provider (str): + [Output Only] The name of the provider for this facility + (e.g., EQUINIX). + + This field is a member of `oneof`_ ``_facility_provider``. + facility_provider_facility_id (str): + [Output Only] A provider-assigned Identifier for this + facility (e.g., Ashburn-DC1). + + This field is a member of `oneof`_ ``_facility_provider_facility_id``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. 
+ kind (str): + [Output Only] Type of the resource. Always + compute#interconnectLocation for interconnect locations. + + This field is a member of `oneof`_ ``_kind``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + peeringdb_facility_id (str): + [Output Only] The peeringdb identifier for this facility + (corresponding with a netfac type in peeringdb). + + This field is a member of `oneof`_ ``_peeringdb_facility_id``. + region_infos (MutableSequence[google.cloud.compute_v1.types.InterconnectLocationRegionInfo]): + [Output Only] A list of InterconnectLocation.RegionInfo + objects, that describe parameters pertaining to the relation + between this InterconnectLocation and various Google Cloud + regions. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + status (str): + [Output Only] The status of this InterconnectLocation, which + can take one of the following values: - CLOSED: The + InterconnectLocation is closed and is unavailable for + provisioning new Interconnects. - AVAILABLE: The + InterconnectLocation is available for provisioning new + Interconnects. Check the Status enum for the list of + possible values. + + This field is a member of `oneof`_ ``_status``. + supports_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_supports_pzs``. + """ + class Continent(proto.Enum): + r"""[Output Only] Continent for this location, which can take one of the + following values: - AFRICA - ASIA_PAC - EUROPE - NORTH_AMERICA - + SOUTH_AMERICA + + Values: + UNDEFINED_CONTINENT (0): + A value indicating that the enum field is not + set. + AFRICA (317443706): + No description available. + ASIA_PAC (119782269): + No description available. + C_AFRICA (71993846): + No description available. + C_ASIA_PAC (465668089): + No description available. + C_EUROPE (200369438): + No description available. 
+ C_NORTH_AMERICA (275697048): + No description available. + C_SOUTH_AMERICA (397149792): + No description available. + EUROPE (445819298): + No description available. + NORTH_AMERICA (448015508): + No description available. + SOUTH_AMERICA (32597340): + No description available. + """ + UNDEFINED_CONTINENT = 0 + AFRICA = 317443706 + ASIA_PAC = 119782269 + C_AFRICA = 71993846 + C_ASIA_PAC = 465668089 + C_EUROPE = 200369438 + C_NORTH_AMERICA = 275697048 + C_SOUTH_AMERICA = 397149792 + EUROPE = 445819298 + NORTH_AMERICA = 448015508 + SOUTH_AMERICA = 32597340 + + class Status(proto.Enum): + r"""[Output Only] The status of this InterconnectLocation, which can + take one of the following values: - CLOSED: The InterconnectLocation + is closed and is unavailable for provisioning new Interconnects. - + AVAILABLE: The InterconnectLocation is available for provisioning + new Interconnects. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + AVAILABLE (442079913): + The InterconnectLocation is available for + provisioning new Interconnects. + CLOSED (380163436): + The InterconnectLocation is closed for + provisioning new Interconnects. 
+ """ + UNDEFINED_STATUS = 0 + AVAILABLE = 442079913 + CLOSED = 380163436 + + address: str = proto.Field( + proto.STRING, + number=462920692, + optional=True, + ) + availability_zone: str = proto.Field( + proto.STRING, + number=158459920, + optional=True, + ) + city: str = proto.Field( + proto.STRING, + number=3053931, + optional=True, + ) + continent: str = proto.Field( + proto.STRING, + number=133442996, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + facility_provider: str = proto.Field( + proto.STRING, + number=533303309, + optional=True, + ) + facility_provider_facility_id: str = proto.Field( + proto.STRING, + number=87269125, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + peeringdb_facility_id: str = proto.Field( + proto.STRING, + number=536567094, + optional=True, + ) + region_infos: MutableSequence['InterconnectLocationRegionInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=312194170, + message='InterconnectLocationRegionInfo', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + supports_pzs: bool = proto.Field( + proto.BOOL, + number=83983214, + optional=True, + ) + + +class InterconnectLocationList(proto.Message): + r"""Response to the list request, and contains a list of + interconnect locations. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. 
+ + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InterconnectLocation]): + A list of InterconnectLocation resources. + kind (str): + [Output Only] Type of resource. Always + compute#interconnectLocationList for lists of interconnect + locations. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InterconnectLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InterconnectLocation', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InterconnectLocationRegionInfo(proto.Message): + r"""Information about any potential InterconnectAttachments + between an Interconnect at a specific InterconnectLocation, and + a specific Cloud Region. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + expected_rtt_ms (int): + Expected round-trip time in milliseconds, + from this InterconnectLocation to a VM in this + region. + + This field is a member of `oneof`_ ``_expected_rtt_ms``. + location_presence (str): + Identifies the network presence of this + location. Check the LocationPresence enum for + the list of possible values. + + This field is a member of `oneof`_ ``_location_presence``. + region (str): + URL for the region of this location. + + This field is a member of `oneof`_ ``_region``. + """ + class LocationPresence(proto.Enum): + r"""Identifies the network presence of this location. + + Values: + UNDEFINED_LOCATION_PRESENCE (0): + A value indicating that the enum field is not + set. + GLOBAL (494663587): + This region is not in any common network + presence with this InterconnectLocation. + LOCAL_REGION (403535464): + This region shares the same regional network + presence as this InterconnectLocation. + LP_GLOBAL (429584062): + [Deprecated] This region is not in any common network + presence with this InterconnectLocation. + LP_LOCAL_REGION (488598851): + [Deprecated] This region shares the same regional network + presence as this InterconnectLocation. + """ + UNDEFINED_LOCATION_PRESENCE = 0 + GLOBAL = 494663587 + LOCAL_REGION = 403535464 + LP_GLOBAL = 429584062 + LP_LOCAL_REGION = 488598851 + + expected_rtt_ms: int = proto.Field( + proto.INT64, + number=422543866, + optional=True, + ) + location_presence: str = proto.Field( + proto.STRING, + number=101517893, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + + +class InterconnectOutageNotification(proto.Message): + r"""Description of a planned outage on this Interconnect. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + affected_circuits (MutableSequence[str]): + If issue_type is IT_PARTIAL_OUTAGE, a list of the + Google-side circuit IDs that will be affected. + description (str): + A description about the purpose of the + outage. + + This field is a member of `oneof`_ ``_description``. + end_time (int): + Scheduled end time for the outage + (milliseconds since Unix epoch). + + This field is a member of `oneof`_ ``_end_time``. + issue_type (str): + Form this outage is expected to take, which can take one of + the following values: - OUTAGE: The Interconnect may be + completely out of service for some or all of the specified + window. - PARTIAL_OUTAGE: Some circuits comprising the + Interconnect as a whole should remain up, but with reduced + bandwidth. Note that the versions of this enum prefixed with + "IT_" have been deprecated in favor of the unprefixed + values. Check the IssueType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_issue_type``. + name (str): + Unique identifier for this outage + notification. + + This field is a member of `oneof`_ ``_name``. + source (str): + The party that generated this notification, which can take + the following value: - GOOGLE: this notification as + generated by Google. Note that the value of NSRC_GOOGLE has + been deprecated in favor of GOOGLE. Check the Source enum + for the list of possible values. + + This field is a member of `oneof`_ ``_source``. + start_time (int): + Scheduled start time for the outage + (milliseconds since Unix epoch). + + This field is a member of `oneof`_ ``_start_time``. + state (str): + State of this notification, which can take one of the + following values: - ACTIVE: This outage notification is + active. The event could be in the past, present, or future. + See start_time and end_time for scheduling. 
- CANCELLED: The + outage associated with this notification was cancelled + before the outage was due to start. - COMPLETED: The outage + associated with this notification is complete. Note that the + versions of this enum prefixed with "NS_" have been + deprecated in favor of the unprefixed values. Check the + State enum for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + """ + class IssueType(proto.Enum): + r"""Form this outage is expected to take, which can take one of the + following values: - OUTAGE: The Interconnect may be completely out + of service for some or all of the specified window. - + PARTIAL_OUTAGE: Some circuits comprising the Interconnect as a whole + should remain up, but with reduced bandwidth. Note that the versions + of this enum prefixed with "IT_" have been deprecated in favor of + the unprefixed values. + + Values: + UNDEFINED_ISSUE_TYPE (0): + A value indicating that the enum field is not + set. + IT_OUTAGE (175779973): + [Deprecated] The Interconnect may be completely out of + service for some or all of the specified window. + IT_PARTIAL_OUTAGE (92103971): + [Deprecated] Some circuits comprising the Interconnect will + be out of service during the expected window. The + interconnect as a whole should remain up, albeit with + reduced bandwidth. + OUTAGE (195285745): + The Interconnect may be completely out of + service for some or all of the specified window. + PARTIAL_OUTAGE (147053455): + Some circuits comprising the Interconnect + will be out of service during the expected + window. The interconnect as a whole should + remain up, albeit with reduced bandwidth. + """ + UNDEFINED_ISSUE_TYPE = 0 + IT_OUTAGE = 175779973 + IT_PARTIAL_OUTAGE = 92103971 + OUTAGE = 195285745 + PARTIAL_OUTAGE = 147053455 + + class Source(proto.Enum): + r"""The party that generated this notification, which can take the + following value: - GOOGLE: this notification as generated by Google. 
+ Note that the value of NSRC_GOOGLE has been deprecated in favor of + GOOGLE. + + Values: + UNDEFINED_SOURCE (0): + A value indicating that the enum field is not + set. + GOOGLE (497439289): + This notification was generated by Google. + NSRC_GOOGLE (510574562): + [Deprecated] This notification was generated by Google. + """ + UNDEFINED_SOURCE = 0 + GOOGLE = 497439289 + NSRC_GOOGLE = 510574562 + + class State(proto.Enum): + r"""State of this notification, which can take one of the following + values: - ACTIVE: This outage notification is active. The event + could be in the past, present, or future. See start_time and + end_time for scheduling. - CANCELLED: The outage associated with + this notification was cancelled before the outage was due to start. + - COMPLETED: The outage associated with this notification is + complete. Note that the versions of this enum prefixed with "NS_" + have been deprecated in favor of the unprefixed values. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + This outage notification is active. The event could be in + the future, present, or past. See start_time and end_time + for scheduling. + CANCELLED (41957681): + The outage associated with this notification + was cancelled before the outage was due to + start. + COMPLETED (309921323): + The outage associated with this notification + is complete. + NS_ACTIVE (252563136): + [Deprecated] This outage notification is active. The event + could be in the future, present, or past. See start_time and + end_time for scheduling. + NS_CANCELED (506579411): + [Deprecated] The outage associated with this notification + was canceled before the outage was due to start. 
+ """ + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + CANCELLED = 41957681 + COMPLETED = 309921323 + NS_ACTIVE = 252563136 + NS_CANCELED = 506579411 + + affected_circuits: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=177717013, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + end_time: int = proto.Field( + proto.INT64, + number=114938801, + optional=True, + ) + issue_type: str = proto.Field( + proto.STRING, + number=369639136, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + start_time: int = proto.Field( + proto.INT64, + number=37467274, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class InterconnectRemoteLocation(proto.Message): + r"""Represents a Cross-Cloud Interconnect Remote Location + resource. You can use this resource to find remote location + details about an Interconnect attachment (VLAN). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address (str): + [Output Only] The postal address of the Point of Presence, + each line in the address is separated by a newline + character. + + This field is a member of `oneof`_ ``_address``. + attachment_configuration_constraints (google.cloud.compute_v1.types.InterconnectAttachmentConfigurationConstraints): + [Output Only] Subset of fields from InterconnectAttachment's + \|configurationConstraints\| field that apply to all + attachments for this remote location. + + This field is a member of `oneof`_ ``_attachment_configuration_constraints``. + city (str): + [Output Only] Metropolitan area designator that indicates + which city an interconnect is located. For example: + "Chicago, IL", "Amsterdam, Netherlands". + + This field is a member of `oneof`_ ``_city``. 
+ constraints (google.cloud.compute_v1.types.InterconnectRemoteLocationConstraints): + [Output Only] Constraints on the parameters for creating + Cross-Cloud Interconnect and associated + InterconnectAttachments. + + This field is a member of `oneof`_ ``_constraints``. + continent (str): + [Output Only] Continent for this location, which can take + one of the following values: - AFRICA - ASIA_PAC - EUROPE - + NORTH_AMERICA - SOUTH_AMERICA Check the Continent enum for + the list of possible values. + + This field is a member of `oneof`_ ``_continent``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + [Output Only] An optional description of the resource. + + This field is a member of `oneof`_ ``_description``. + facility_provider (str): + [Output Only] The name of the provider for this facility + (e.g., EQUINIX). + + This field is a member of `oneof`_ ``_facility_provider``. + facility_provider_facility_id (str): + [Output Only] A provider-assigned Identifier for this + facility (e.g., Ashburn-DC1). + + This field is a member of `oneof`_ ``_facility_provider_facility_id``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#interconnectRemoteLocation for interconnect remote + locations. + + This field is a member of `oneof`_ ``_kind``. + lacp (str): + [Output Only] Link Aggregation Control Protocol (LACP) + constraints, which can take one of the following values: + LACP_SUPPORTED, LACP_UNSUPPORTED Check the Lacp enum for the + list of possible values. + + This field is a member of `oneof`_ ``_lacp``. + max_lag_size100_gbps (int): + [Output Only] The maximum number of 100 Gbps ports supported + in a link aggregation group (LAG). 
When linkType is 100 + Gbps, requestedLinkCount cannot exceed + max_lag_size_100_gbps. + + This field is a member of `oneof`_ ``_max_lag_size100_gbps``. + max_lag_size10_gbps (int): + [Output Only] The maximum number of 10 Gbps ports supported + in a link aggregation group (LAG). When linkType is 10 Gbps, + requestedLinkCount cannot exceed max_lag_size_10_gbps. + + This field is a member of `oneof`_ ``_max_lag_size10_gbps``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + peeringdb_facility_id (str): + [Output Only] The peeringdb identifier for this facility + (corresponding with a netfac type in peeringdb). + + This field is a member of `oneof`_ ``_peeringdb_facility_id``. + permitted_connections (MutableSequence[google.cloud.compute_v1.types.InterconnectRemoteLocationPermittedConnections]): + [Output Only] Permitted connections. + remote_service (str): + [Output Only] Indicates the service provider present at the + remote location. Example values: "Amazon Web Services", + "Microsoft Azure". + + This field is a member of `oneof`_ ``_remote_service``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + status (str): + [Output Only] The status of this InterconnectRemoteLocation, + which can take one of the following values: - CLOSED: The + InterconnectRemoteLocation is closed and is unavailable for + provisioning new Cross-Cloud Interconnects. - AVAILABLE: The + InterconnectRemoteLocation is available for provisioning new + Cross-Cloud Interconnects. Check the Status enum for the + list of possible values. + + This field is a member of `oneof`_ ``_status``. 
+ """ + class Continent(proto.Enum): + r"""[Output Only] Continent for this location, which can take one of the + following values: - AFRICA - ASIA_PAC - EUROPE - NORTH_AMERICA - + SOUTH_AMERICA + + Values: + UNDEFINED_CONTINENT (0): + A value indicating that the enum field is not + set. + AFRICA (317443706): + No description available. + ASIA_PAC (119782269): + No description available. + EUROPE (445819298): + No description available. + NORTH_AMERICA (448015508): + No description available. + SOUTH_AMERICA (32597340): + No description available. + """ + UNDEFINED_CONTINENT = 0 + AFRICA = 317443706 + ASIA_PAC = 119782269 + EUROPE = 445819298 + NORTH_AMERICA = 448015508 + SOUTH_AMERICA = 32597340 + + class Lacp(proto.Enum): + r"""[Output Only] Link Aggregation Control Protocol (LACP) constraints, + which can take one of the following values: LACP_SUPPORTED, + LACP_UNSUPPORTED + + Values: + UNDEFINED_LACP (0): + A value indicating that the enum field is not + set. + LACP_SUPPORTED (339576113): + LACP_SUPPORTED: LACP is supported, and enabled by default on + the Cross-Cloud Interconnect. + LACP_UNSUPPORTED (203930104): + LACP_UNSUPPORTED: LACP is not supported and is not be + enabled on this port. GetDiagnostics shows + bundleAggregationType as "static". GCP does not support LAGs + without LACP, so requestedLinkCount must be 1. + """ + UNDEFINED_LACP = 0 + LACP_SUPPORTED = 339576113 + LACP_UNSUPPORTED = 203930104 + + class Status(proto.Enum): + r"""[Output Only] The status of this InterconnectRemoteLocation, which + can take one of the following values: - CLOSED: The + InterconnectRemoteLocation is closed and is unavailable for + provisioning new Cross-Cloud Interconnects. - AVAILABLE: The + InterconnectRemoteLocation is available for provisioning new + Cross-Cloud Interconnects. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. 
+ AVAILABLE (442079913): + The InterconnectRemoteLocation is available + for provisioning new Cross-Cloud Interconnects. + CLOSED (380163436): + The InterconnectRemoteLocation is closed for + provisioning new Cross-Cloud Interconnects. + """ + UNDEFINED_STATUS = 0 + AVAILABLE = 442079913 + CLOSED = 380163436 + + address: str = proto.Field( + proto.STRING, + number=462920692, + optional=True, + ) + attachment_configuration_constraints: 'InterconnectAttachmentConfigurationConstraints' = proto.Field( + proto.MESSAGE, + number=326825041, + optional=True, + message='InterconnectAttachmentConfigurationConstraints', + ) + city: str = proto.Field( + proto.STRING, + number=3053931, + optional=True, + ) + constraints: 'InterconnectRemoteLocationConstraints' = proto.Field( + proto.MESSAGE, + number=3909174, + optional=True, + message='InterconnectRemoteLocationConstraints', + ) + continent: str = proto.Field( + proto.STRING, + number=133442996, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + facility_provider: str = proto.Field( + proto.STRING, + number=533303309, + optional=True, + ) + facility_provider_facility_id: str = proto.Field( + proto.STRING, + number=87269125, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + lacp: str = proto.Field( + proto.STRING, + number=3313826, + optional=True, + ) + max_lag_size100_gbps: int = proto.Field( + proto.INT32, + number=245219253, + optional=True, + ) + max_lag_size10_gbps: int = proto.Field( + proto.INT32, + number=294007573, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + peeringdb_facility_id: str = proto.Field( + proto.STRING, + number=536567094, + optional=True, + ) + 
permitted_connections: MutableSequence['InterconnectRemoteLocationPermittedConnections'] = proto.RepeatedField( + proto.MESSAGE, + number=442063278, + message='InterconnectRemoteLocationPermittedConnections', + ) + remote_service: str = proto.Field( + proto.STRING, + number=391954364, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class InterconnectRemoteLocationConstraints(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + port_pair_remote_location (str): + [Output Only] Port pair remote location constraints, which + can take one of the following values: + PORT_PAIR_UNCONSTRAINED_REMOTE_LOCATION, + PORT_PAIR_MATCHING_REMOTE_LOCATION. GCP's API refers only to + individual ports, but the UI uses this field when ordering a + pair of ports, to prevent users from accidentally ordering + something that is incompatible with their cloud provider. + Specifically, when ordering a redundant pair of Cross-Cloud + Interconnect ports, and one of them uses a remote location + with portPairMatchingRemoteLocation set to matching, the UI + requires that both ports use the same remote location. Check + the PortPairRemoteLocation enum for the list of possible + values. + + This field is a member of `oneof`_ ``_port_pair_remote_location``. + port_pair_vlan (str): + [Output Only] Port pair VLAN constraints, which can take one + of the following values: PORT_PAIR_UNCONSTRAINED_VLAN, + PORT_PAIR_MATCHING_VLAN Check the PortPairVlan enum for the + list of possible values. + + This field is a member of `oneof`_ ``_port_pair_vlan``. 
+ subnet_length_range (google.cloud.compute_v1.types.InterconnectRemoteLocationConstraintsSubnetLengthRange): + [Output Only] [min-length, max-length] The minimum and + maximum value (inclusive) for the IPv4 subnet length. For + example, an interconnectRemoteLocation for Azure has {min: + 30, max: 30} because Azure requires /30 subnets. This range + specifies the values supported by both cloud providers. + Interconnect currently supports /29 and /30 IPv4 subnet + lengths. If a remote cloud has no constraint on IPv4 subnet + length, the range would thus be {min: 29, max: 30}. + + This field is a member of `oneof`_ ``_subnet_length_range``. + """ + class PortPairRemoteLocation(proto.Enum): + r"""[Output Only] Port pair remote location constraints, which can take + one of the following values: + PORT_PAIR_UNCONSTRAINED_REMOTE_LOCATION, + PORT_PAIR_MATCHING_REMOTE_LOCATION. GCP's API refers only to + individual ports, but the UI uses this field when ordering a pair of + ports, to prevent users from accidentally ordering something that is + incompatible with their cloud provider. Specifically, when ordering + a redundant pair of Cross-Cloud Interconnect ports, and one of them + uses a remote location with portPairMatchingRemoteLocation set to + matching, the UI requires that both ports use the same remote + location. + + Values: + UNDEFINED_PORT_PAIR_REMOTE_LOCATION (0): + A value indicating that the enum field is not + set. + PORT_PAIR_MATCHING_REMOTE_LOCATION (207291859): + If PORT_PAIR_MATCHING_REMOTE_LOCATION, the remote cloud + provider allocates ports in pairs, and the user should + choose the same remote location for both ports. + PORT_PAIR_UNCONSTRAINED_REMOTE_LOCATION (60609829): + If PORT_PAIR_UNCONSTRAINED_REMOTE_LOCATION, a user may opt + to provision a redundant pair of Cross-Cloud Interconnects + using two different remote locations in the same city. 
+ """ + UNDEFINED_PORT_PAIR_REMOTE_LOCATION = 0 + PORT_PAIR_MATCHING_REMOTE_LOCATION = 207291859 + PORT_PAIR_UNCONSTRAINED_REMOTE_LOCATION = 60609829 + + class PortPairVlan(proto.Enum): + r"""[Output Only] Port pair VLAN constraints, which can take one of the + following values: PORT_PAIR_UNCONSTRAINED_VLAN, + PORT_PAIR_MATCHING_VLAN + + Values: + UNDEFINED_PORT_PAIR_VLAN (0): + A value indicating that the enum field is not + set. + PORT_PAIR_MATCHING_VLAN (250295358): + If PORT_PAIR_MATCHING_VLAN, the Interconnect for this + attachment is part of a pair of ports that should have + matching VLAN allocations. This occurs with Cross-Cloud + Interconnect to Azure remote locations. While GCP's API does + not explicitly group pairs of ports, the UI uses this field + to ensure matching VLAN ids when configuring a redundant + VLAN pair. + PORT_PAIR_UNCONSTRAINED_VLAN (175227948): + PORT_PAIR_UNCONSTRAINED_VLAN means there is no constraint. + """ + UNDEFINED_PORT_PAIR_VLAN = 0 + PORT_PAIR_MATCHING_VLAN = 250295358 + PORT_PAIR_UNCONSTRAINED_VLAN = 175227948 + + port_pair_remote_location: str = proto.Field( + proto.STRING, + number=495917351, + optional=True, + ) + port_pair_vlan: str = proto.Field( + proto.STRING, + number=478214506, + optional=True, + ) + subnet_length_range: 'InterconnectRemoteLocationConstraintsSubnetLengthRange' = proto.Field( + proto.MESSAGE, + number=184473670, + optional=True, + message='InterconnectRemoteLocationConstraintsSubnetLengthRange', + ) + + +class InterconnectRemoteLocationConstraintsSubnetLengthRange(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_ (int): + + This field is a member of `oneof`_ ``_max``. + min_ (int): + + This field is a member of `oneof`_ ``_min``. 
+ """ + + max_: int = proto.Field( + proto.INT32, + number=107876, + optional=True, + ) + min_: int = proto.Field( + proto.INT32, + number=108114, + optional=True, + ) + + +class InterconnectRemoteLocationList(proto.Message): + r"""Response to the list request, and contains a list of + interconnect remote locations. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InterconnectRemoteLocation]): + A list of InterconnectRemoteLocation + resources. + kind (str): + [Output Only] Type of resource. Always + compute#interconnectRemoteLocationList for lists of + interconnect remote locations. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token lets you get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InterconnectRemoteLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InterconnectRemoteLocation', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class InterconnectRemoteLocationPermittedConnections(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_location (str): + [Output Only] URL of an Interconnect location that is + permitted to connect to this Interconnect remote location. + + This field is a member of `oneof`_ ``_interconnect_location``. + """ + + interconnect_location: str = proto.Field( + proto.STRING, + number=492235846, + optional=True, + ) + + +class InterconnectsGetDiagnosticsResponse(proto.Message): + r"""Response for the InterconnectsGetDiagnosticsRequest. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + result (google.cloud.compute_v1.types.InterconnectDiagnostics): + + This field is a member of `oneof`_ ``_result``. + """ + + result: 'InterconnectDiagnostics' = proto.Field( + proto.MESSAGE, + number=139315229, + optional=True, + message='InterconnectDiagnostics', + ) + + +class InvalidateCacheUrlMapRequest(proto.Message): + r"""A request message for UrlMaps.InvalidateCache. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cache_invalidation_rule_resource (google.cloud.compute_v1.types.CacheInvalidationRule): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + url_map (str): + Name of the UrlMap scoping this request. + """ + + cache_invalidation_rule_resource: 'CacheInvalidationRule' = proto.Field( + proto.MESSAGE, + number=312795565, + message='CacheInvalidationRule', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + + +class Items(proto.Message): + r"""Metadata + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Key for the metadata entry. Keys must conform to the + following regexp: [a-zA-Z0-9-_]+, and be less than 128 bytes + in length. This is reflected as part of a URL in the + metadata server. Additionally, to avoid ambiguity, keys must + not conflict with any other metadata keys for the project. 
+ + This field is a member of `oneof`_ ``_key``. + value (str): + Value for the metadata entry. These are + free-form strings, and only have meaning as + interpreted by the image running in the + instance. The only restriction placed on values + is that their size must be less than or equal to + 262144 bytes (256 KiB). + + This field is a member of `oneof`_ ``_value``. + """ + + key: str = proto.Field( + proto.STRING, + number=106079, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=111972721, + optional=True, + ) + + +class License(proto.Message): + r"""Represents a License resource. A License represents billing and + aggregate usage data for public and marketplace images. *Caution* + This resource is intended for use only by third-party partners who + are creating Cloud Marketplace images. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + charges_use_fee (bool): + [Output Only] Deprecated. This field no longer reflects + whether a license charges a usage fee. + + This field is a member of `oneof`_ ``_charges_use_fee``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional textual description of the + resource; provided by the client when the + resource is created. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of resource. Always compute#license for + licenses. + + This field is a member of `oneof`_ ``_kind``. + license_code (int): + [Output Only] The unique code used to attach this license to + images, snapshots, and disks. + + This field is a member of `oneof`_ ``_license_code``. + name (str): + Name of the resource. 
The name must be 1-63 + characters long and comply with RFC1035. + + This field is a member of `oneof`_ ``_name``. + resource_requirements (google.cloud.compute_v1.types.LicenseResourceRequirements): + + This field is a member of `oneof`_ ``_resource_requirements``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + transferable (bool): + If false, licenses will not be copied from + the source resource when creating an image from + a disk, disk from snapshot, or snapshot from + disk. + + This field is a member of `oneof`_ ``_transferable``. + """ + + charges_use_fee: bool = proto.Field( + proto.BOOL, + number=372412622, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + license_code: int = proto.Field( + proto.UINT64, + number=1467179, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + resource_requirements: 'LicenseResourceRequirements' = proto.Field( + proto.MESSAGE, + number=214292769, + optional=True, + message='LicenseResourceRequirements', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + transferable: bool = proto.Field( + proto.BOOL, + number=4349893, + optional=True, + ) + + +class LicenseCode(proto.Message): + r"""Represents a License Code resource. A License Code is a unique + identifier used to represent a license resource. *Caution* This + resource is intended for use only by third-party partners who are + creating Cloud Marketplace images. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + [Output Only] Description of this License Code. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of resource. Always compute#licenseCode + for licenses. + + This field is a member of `oneof`_ ``_kind``. + license_alias (MutableSequence[google.cloud.compute_v1.types.LicenseCodeLicenseAlias]): + [Output Only] URL and description aliases of Licenses with + the same License Code. + name (str): + [Output Only] Name of the resource. The name is 1-20 + characters long and must be a valid 64 bit integer. + + This field is a member of `oneof`_ ``_name``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + state (str): + [Output Only] Current state of this License Code. Check the + State enum for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + transferable (bool): + [Output Only] If true, the license will remain attached when + creating images or snapshots from disks. Otherwise, the + license is not transferred. + + This field is a member of `oneof`_ ``_transferable``. + """ + class State(proto.Enum): + r"""[Output Only] Current state of this License Code. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + DISABLED (516696700): + Machines are not allowed to attach boot disks + with this License Code. Requests to create new + resources with this license will be rejected. 
+ ENABLED (182130465): + Use is allowed for anyone with USE_READ_ONLY access to this + License Code. + RESTRICTED (261551195): + Use of this license is limited to a project + whitelist. + STATE_UNSPECIFIED (470755401): + No description available. + TERMINATED (250018339): + Reserved state. + """ + UNDEFINED_STATE = 0 + DISABLED = 516696700 + ENABLED = 182130465 + RESTRICTED = 261551195 + STATE_UNSPECIFIED = 470755401 + TERMINATED = 250018339 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + license_alias: MutableSequence['LicenseCodeLicenseAlias'] = proto.RepeatedField( + proto.MESSAGE, + number=43550930, + message='LicenseCodeLicenseAlias', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + transferable: bool = proto.Field( + proto.BOOL, + number=4349893, + optional=True, + ) + + +class LicenseCodeLicenseAlias(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + [Output Only] Description of this License Code. + + This field is a member of `oneof`_ ``_description``. + self_link (str): + [Output Only] URL of license corresponding to this License + Code. + + This field is a member of `oneof`_ ``_self_link``. 
+ """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + + +class LicenseResourceCommitment(proto.Message): + r"""Commitment for a particular license resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + amount (int): + The number of licenses purchased. + + This field is a member of `oneof`_ ``_amount``. + cores_per_license (str): + Specifies the core range of the instance for + which this license applies. + + This field is a member of `oneof`_ ``_cores_per_license``. + license_ (str): + Any applicable license URI. + + This field is a member of `oneof`_ ``_license``. + """ + + amount: int = proto.Field( + proto.INT64, + number=196759640, + optional=True, + ) + cores_per_license: str = proto.Field( + proto.STRING, + number=32482324, + optional=True, + ) + license_: str = proto.Field( + proto.STRING, + number=166757441, + optional=True, + ) + + +class LicenseResourceRequirements(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_guest_cpu_count (int): + Minimum number of guest cpus required to use + the Instance. Enforced at Instance creation and + Instance start. + + This field is a member of `oneof`_ ``_min_guest_cpu_count``. + min_memory_mb (int): + Minimum memory required to use the Instance. + Enforced at Instance creation and Instance + start. + + This field is a member of `oneof`_ ``_min_memory_mb``. + """ + + min_guest_cpu_count: int = proto.Field( + proto.INT32, + number=477964836, + optional=True, + ) + min_memory_mb: int = proto.Field( + proto.INT32, + number=504785894, + optional=True, + ) + + +class LicensesListResponse(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.License]): + A list of License resources. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['License'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='License', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ListAcceleratorTypesRequest(proto.Message): + r"""A request message for AcceleratorTypes.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListAddressesRequest(proto.Message): + r"""A request message for Addresses.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListAssociationsFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.ListAssociations. See + the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_resource (str): + The target resource to list associations. It + is an organization, or a folder. + + This field is a member of `oneof`_ ``_target_resource``. + """ + + target_resource: str = proto.Field( + proto.STRING, + number=467318524, + optional=True, + ) + + +class ListAutoscalersRequest(proto.Message): + r"""A request message for Autoscalers.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. 
To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). 
Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListAvailableFeaturesRegionSslPoliciesRequest(proto.Message): + r"""A request message for + RegionSslPolicies.ListAvailableFeatures. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. 
If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. 
For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListAvailableFeaturesSslPoliciesRequest(proto.Message): + r"""A request message for SslPolicies.ListAvailableFeatures. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListBackendBucketsRequest(proto.Message): + r"""A request message for BackendBuckets.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListBackendServicesRequest(proto.Message): + r"""A request message for BackendServices.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListDiskTypesRequest(proto.Message): + r"""A request message for DiskTypes.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListDisksRequest(proto.Message): + r"""A request message for Disks.List. See the method description + for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListErrorsInstanceGroupManagersRequest(proto.Message): + r"""A request message for InstanceGroupManagers.ListErrors. See + the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It must be a string + that meets the requirements in RFC1035, or an unsigned long + integer: must match regexp pattern: + ``(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)|[1-9][0-9]{0,19}``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. 
Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.ListErrors. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It must be a string + that meets the requirements in RFC1035, or an unsigned long + integer: must match regexp pattern: + ``(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)|[1-9][0-9]{0,19}``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. 
+ project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. This + should conform to RFC1035. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListExternalVpnGatewaysRequest(proto.Message): + r"""A request message for ExternalVpnGateways.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListFirewallPoliciesRequest(proto.Message): + r"""A request message for FirewallPolicies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. 
You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. 
This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + parent_id (str): + Parent ID for this request. The ID can be either be + "folders/[FOLDER_ID]" if the parent is a folder or + "organizations/[ORGANIZATION_ID]" if the parent is an + organization. + + This field is a member of `oneof`_ ``_parent_id``. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + parent_id: str = proto.Field( + proto.STRING, + number=459714768, + optional=True, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListFirewallsRequest(proto.Message): + r"""A request message for Firewalls.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListForwardingRulesRequest(proto.Message): + r"""A request message for ForwardingRules.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListGlobalAddressesRequest(proto.Message): + r"""A request message for GlobalAddresses.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListGlobalForwardingRulesRequest(proto.Message): + r"""A request message for GlobalForwardingRules.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListGlobalNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for GlobalNetworkEndpointGroups.List. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListGlobalOperationsRequest(proto.Message): + r"""A request message for GlobalOperations.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListGlobalOrganizationOperationsRequest(proto.Message): + r"""A request message for GlobalOrganizationOperations.List. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + parent_id (str): + Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. 
The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + parent_id: str = proto.Field( + proto.STRING, + number=459714768, + optional=True, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for GlobalPublicDelegatedPrefixes.List. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListHealthChecksRequest(proto.Message): + r"""A request message for HealthChecks.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListImagesRequest(proto.Message): + r"""A request message for Images.List. See the method description + for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListInstanceGroupManagersRequest(proto.Message): + r"""A request message for InstanceGroupManagers.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListInstanceGroupsRequest(proto.Message): + r"""A request message for InstanceGroups.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the instance group + is located. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListInstanceTemplatesRequest(proto.Message): + r"""A request message for InstanceTemplates.List. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListInstancesInstanceGroupsRequest(proto.Message): + r"""A request message for InstanceGroups.ListInstances. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. 
The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + instance_group (str): + The name of the instance group from which you + want to generate a list of included instances. 
+ instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest): + The body resource for this request + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the instance group + is located. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + instance_groups_list_instances_request_resource: 'InstanceGroupsListInstancesRequest' = proto.Field( + proto.MESSAGE, + number=476255263, + message='InstanceGroupsListInstancesRequest', + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListInstancesRegionInstanceGroupsRequest(proto.Message): + r"""A request message for RegionInstanceGroups.ListInstances. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. 
For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + instance_group (str): + Name of the regional instance group for which + we want to list the instances. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest): + The body resource for this request + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_groups_list_instances_request_resource: 'RegionInstanceGroupsListInstancesRequest' = proto.Field( + proto.MESSAGE, + number=48239828, + message='RegionInstanceGroupsListInstancesRequest', + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListInstancesRequest(proto.Message): + r"""A request message for Instances.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. 
The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. 
+ order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListInterconnectAttachmentsRequest(proto.Message): + r"""A request message for InterconnectAttachments.List. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListInterconnectLocationsRequest(proto.Message): + r"""A request message for InterconnectLocations.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListInterconnectRemoteLocationsRequest(proto.Message): + r"""A request message for InterconnectRemoteLocations.List. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListInterconnectsRequest(proto.Message): + r"""A request message for Interconnects.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListLicensesRequest(proto.Message): + r"""A request message for Licenses.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListMachineImagesRequest(proto.Message): + r"""A request message for MachineImages.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListMachineTypesRequest(proto.Message): + r"""A request message for MachineTypes.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.ListManagedInstances. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. 
+ page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.ListManagedInstances. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. 
The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. 
+ + This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListNetworkAttachmentsRequest(proto.Message): + r"""A request message for NetworkAttachments.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for NetworkEndpointGroups.List. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the network + endpoint group is located. It should comply with + RFC1035. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for + GlobalNetworkEndpointGroups.ListNetworkEndpoints. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. 
The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + network_endpoint_group (str): + The name of the network endpoint group from + which you want to generate a list of included + network endpoints. It should comply with + RFC1035. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for + NetworkEndpointGroups.ListNetworkEndpoints. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + network_endpoint_group (str): + The name of the network endpoint group from + which you want to generate a list of included + network endpoints. It should comply with + RFC1035. 
+ network_endpoint_groups_list_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsListEndpointsRequest): + The body resource for this request + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the network + endpoint group is located. It should comply with + RFC1035. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + network_endpoint_groups_list_endpoints_request_resource: 'NetworkEndpointGroupsListEndpointsRequest' = proto.Field( + proto.MESSAGE, + number=59493390, + message='NetworkEndpointGroupsListEndpointsRequest', + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListNetworkFirewallPoliciesRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. 
For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. 
(Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListNetworksRequest(proto.Message): + r"""A request message for Networks.List. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. 
The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. 
Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListNodeTemplatesRequest(proto.Message): + r"""A request message for NodeTemplates.List. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListNodeTypesRequest(proto.Message): + r"""A request message for NodeTypes.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListNodesNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.ListNodes. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + node_group (str): + Name of the NodeGroup resource whose nodes + you want to list. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListPacketMirroringsRequest(proto.Message): + r"""A request message for PacketMirrorings.List. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListPeeringRoutesNetworksRequest(proto.Message): + r"""A request message for Networks.ListPeeringRoutes. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + direction (str): + The direction of the exchanged routes. + Check the Direction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_direction``. + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. 
The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. 
+ + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + network (str): + Name of the network for this request. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + peering_name (str): + The response will show routes exchanged over + the given peering connection. + + This field is a member of `oneof`_ ``_peering_name``. + project (str): + Project ID for this request. + region (str): + The region of the request. The response will + include all subnet routes, static routes and + dynamic routes in the region. + + This field is a member of `oneof`_ ``_region``. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + class Direction(proto.Enum): + r"""The direction of the exchanged routes. + + Values: + UNDEFINED_DIRECTION (0): + A value indicating that the enum field is not + set. + INCOMING (338552870): + For routes exported from peer network. + OUTGOING (307438444): + For routes exported from local network. + """ + UNDEFINED_DIRECTION = 0 + INCOMING = 338552870 + OUTGOING = 307438444 + + direction: str = proto.Field( + proto.STRING, + number=111150975, + optional=True, + ) + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + peering_name: str = proto.Field( + proto.STRING, + number=249571370, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.ListPerInstanceConfigs. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. 
The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. 
+ + This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.ListPerInstanceConfigs. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. 
(Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request, + should conform to RFC1035. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): + r"""A request message for + SecurityPolicies.ListPreconfiguredExpressionSets. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListPublicAdvertisedPrefixesRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListReferrersInstancesRequest(proto.Message): + r"""A request message for Instances.ListReferrers. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + instance (str): + Name of the target instance scoping this + request, or '-' if the request should span over + all instances in the container. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. 
+ return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListRegionAutoscalersRequest(proto.Message): + r"""A request message for RegionAutoscalers.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. 
The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. 
Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionBackendServicesRequest(proto.Message): + r"""A request message for RegionBackendServices.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionCommitmentsRequest(proto.Message): + r"""A request message for RegionCommitments.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. 
The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionDiskTypesRequest(proto.Message): + r"""A request message for RegionDiskTypes.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionDisksRequest(proto.Message): + r"""A request message for RegionDisks.List. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionHealthCheckServicesRequest(proto.Message): + r"""A request message for RegionHealthCheckServices.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionHealthChecksRequest(proto.Message): + r"""A request message for RegionHealthChecks.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.List. See + the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionInstanceGroupsRequest(proto.Message): + r"""A request message for RegionInstanceGroups.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionInstanceTemplatesRequest(proto.Message): + r"""A request message for RegionInstanceTemplates.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for RegionNetworkEndpointGroups.List. See + the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + The name of the region where the network + endpoint group is located. It should comply with + RFC1035. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionNetworkFirewallPoliciesRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.List. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. 
The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. 
+ max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionNotificationEndpointsRequest(proto.Message): + r"""A request message for RegionNotificationEndpoints.List. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionOperationsRequest(proto.Message): + r"""A request message for RegionOperations.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. 
The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionSecurityPoliciesRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionSslCertificatesRequest(proto.Message): + r"""A request message for RegionSslCertificates.List. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionSslPoliciesRequest(proto.Message): + r"""A request message for RegionSslPolicies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionTargetHttpProxiesRequest(proto.Message): + r"""A request message for RegionTargetHttpProxies.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionTargetHttpsProxiesRequest(proto.Message): + r"""A request message for RegionTargetHttpsProxies.List. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionTargetTcpProxiesRequest(proto.Message): + r"""A request message for RegionTargetTcpProxies.List. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionUrlMapsRequest(proto.Message): + r"""A request message for RegionUrlMaps.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionsRequest(proto.Message): + r"""A request message for Regions.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListReservationsRequest(proto.Message): + r"""A request message for Reservations.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListResourcePoliciesRequest(proto.Message): + r"""A request message for ResourcePolicies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. 
The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRoutersRequest(proto.Message): + r"""A request message for Routers.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRoutesRequest(proto.Message): + r"""A request message for Routes.List. See the method description + for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListSecurityPoliciesRequest(proto.Message): + r"""A request message for SecurityPolicies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListServiceAttachmentsRequest(proto.Message): + r"""A request message for ServiceAttachments.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. 
The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListSnapshotsRequest(proto.Message): + r"""A request message for Snapshots.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListSslCertificatesRequest(proto.Message): + r"""A request message for SslCertificates.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListSslPoliciesRequest(proto.Message): + r"""A request message for SslPolicies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. 
The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListTargetGrpcProxiesRequest(proto.Message): + r"""A request message for TargetGrpcProxies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListTargetHttpProxiesRequest(proto.Message): + r"""A request message for TargetHttpProxies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListTargetHttpsProxiesRequest(proto.Message): + r"""A request message for TargetHttpsProxies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListTargetInstancesRequest(proto.Message): + r"""A request message for TargetInstances.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone scoping this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListTargetPoolsRequest(proto.Message): + r"""A request message for TargetPools.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. 
For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListTargetSslProxiesRequest(proto.Message): + r"""A request message for TargetSslProxies.List. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListTargetTcpProxiesRequest(proto.Message): + r"""A request message for TargetTcpProxies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. 
The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. 
Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListTargetVpnGatewaysRequest(proto.Message): + r"""A request message for TargetVpnGateways.List. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListUrlMapsRequest(proto.Message): + r"""A request message for UrlMaps.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListUsableSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.ListUsable. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. 
You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. 
This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListVpnGatewaysRequest(proto.Message): + r"""A request message for VpnGateways.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. 
If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. 
For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListVpnTunnelsRequest(proto.Message): + r"""A request message for VpnTunnels.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListXpnHostsProjectsRequest(proto.Message): + r"""A request message for Projects.ListXpnHosts. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. 
+ projects_list_xpn_hosts_request_resource (google.cloud.compute_v1.types.ProjectsListXpnHostsRequest): + The body resource for this request + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + projects_list_xpn_hosts_request_resource: 'ProjectsListXpnHostsRequest' = proto.Field( + proto.MESSAGE, + number=238266391, + message='ProjectsListXpnHostsRequest', + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListZoneOperationsRequest(proto.Message): + r"""A request message for ZoneOperations.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
Name of the zone for this request.
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListZonesRequest(proto.Message): + r"""A request message for Zones.List. See the method description + for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. If you want to use AIP-160, your expression must + specify the field name, an operator, and the value that you + want to use for filtering. The value must be a string, a + number, or a boolean. The operator must be either ``=``, + ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. For example, + if you are filtering Compute Engine instances, you can + exclude instances named ``example-instance`` by specifying + ``name != example-instance``. The ``:`` operator can be used + with string fields to match substrings. For non-string + fields it is equivalent to the ``=`` operator. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class LocalDisk(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_count (int): + Specifies the number of such disks. + + This field is a member of `oneof`_ ``_disk_count``. + disk_size_gb (int): + Specifies the size of the disk in base-2 GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. + disk_type (str): + Specifies the desired disk type on the node. 
+ This disk type must be a local storage type + (e.g.: local-ssd). Note that for nodeTemplates, + this should be the name of the disk type and not + its URL. + + This field is a member of `oneof`_ ``_disk_type``. + """ + + disk_count: int = proto.Field( + proto.INT32, + number=182933485, + optional=True, + ) + disk_size_gb: int = proto.Field( + proto.INT32, + number=316263735, + optional=True, + ) + disk_type: str = proto.Field( + proto.STRING, + number=93009052, + optional=True, + ) + + +class LocalizedMessage(proto.Message): + r"""Provides a localized error message that is safe to return to + the user which can be attached to an RPC error. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + locale (str): + The locale used following the specification + defined at + https://www.rfc-editor.org/rfc/bcp/bcp47.txt. + Examples are: "en-US", "fr-CH", "es-MX". + + This field is a member of `oneof`_ ``_locale``. + message (str): + The localized error message in the above + locale. + + This field is a member of `oneof`_ ``_message``. + """ + + locale: str = proto.Field( + proto.STRING, + number=513150554, + optional=True, + ) + message: str = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + + +class LocationPolicy(proto.Message): + r"""Configuration for location policy among multiple possible + locations (e.g. preferences for zone selection among zones in a + single region). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + locations (MutableMapping[str, google.cloud.compute_v1.types.LocationPolicyLocation]): + Location configurations mapped by location + name. Currently only zone names are supported + and must be represented as valid internal URLs, + such as zones/us-central1-a. + target_shape (str): + Strategy for distributing VMs across zones in + a region. 
Recommended for batch workloads that cannot tolerate distribution over multiple zones. This is the default shape in Bulk Insert and Capacity Advisor APIs.
+ preference (str): + Preference for a given location. Set to + either ALLOW or DENY. Check the Preference enum + for the list of possible values. + + This field is a member of `oneof`_ ``_preference``. + """ + class Preference(proto.Enum): + r"""Preference for a given location. Set to either ALLOW or DENY. + + Values: + UNDEFINED_PREFERENCE (0): + A value indicating that the enum field is not + set. + ALLOW (62368553): + Location is allowed for use. + DENY (2094604): + Location is prohibited. + PREFERENCE_UNSPECIFIED (496219571): + Default value, unused. + """ + UNDEFINED_PREFERENCE = 0 + ALLOW = 62368553 + DENY = 2094604 + PREFERENCE_UNSPECIFIED = 496219571 + + constraints: 'LocationPolicyLocationConstraints' = proto.Field( + proto.MESSAGE, + number=3909174, + optional=True, + message='LocationPolicyLocationConstraints', + ) + preference: str = proto.Field( + proto.STRING, + number=150781147, + optional=True, + ) + + +class LocationPolicyLocationConstraints(proto.Message): + r"""Per-zone constraints on location policy for this zone. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_count (int): + Maximum number of items that are allowed to + be placed in this zone. The value must be + non-negative. + + This field is a member of `oneof`_ ``_max_count``. + """ + + max_count: int = proto.Field( + proto.INT32, + number=287620724, + optional=True, + ) + + +class LogConfig(proto.Message): + r"""This is deprecated and has no effect. Do not use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cloud_audit (google.cloud.compute_v1.types.LogConfigCloudAuditOptions): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_cloud_audit``. + counter (google.cloud.compute_v1.types.LogConfigCounterOptions): + This is deprecated and has no effect. Do not + use. 
+ + This field is a member of `oneof`_ ``_counter``. + data_access (google.cloud.compute_v1.types.LogConfigDataAccessOptions): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_data_access``. + """ + + cloud_audit: 'LogConfigCloudAuditOptions' = proto.Field( + proto.MESSAGE, + number=412852561, + optional=True, + message='LogConfigCloudAuditOptions', + ) + counter: 'LogConfigCounterOptions' = proto.Field( + proto.MESSAGE, + number=420959740, + optional=True, + message='LogConfigCounterOptions', + ) + data_access: 'LogConfigDataAccessOptions' = proto.Field( + proto.MESSAGE, + number=286633881, + optional=True, + message='LogConfigDataAccessOptions', + ) + + +class LogConfigCloudAuditOptions(proto.Message): + r"""This is deprecated and has no effect. Do not use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + authorization_logging_options (google.cloud.compute_v1.types.AuthorizationLoggingOptions): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_authorization_logging_options``. + log_name (str): + This is deprecated and has no effect. Do not + use. Check the LogName enum for the list of + possible values. + + This field is a member of `oneof`_ ``_log_name``. + """ + class LogName(proto.Enum): + r"""This is deprecated and has no effect. Do not use. + + Values: + UNDEFINED_LOG_NAME (0): + A value indicating that the enum field is not + set. + ADMIN_ACTIVITY (427503135): + This is deprecated and has no effect. Do not + use. + DATA_ACCESS (238070681): + This is deprecated and has no effect. Do not + use. + UNSPECIFIED_LOG_NAME (410515182): + This is deprecated and has no effect. Do not + use. 
+ """ + UNDEFINED_LOG_NAME = 0 + ADMIN_ACTIVITY = 427503135 + DATA_ACCESS = 238070681 + UNSPECIFIED_LOG_NAME = 410515182 + + authorization_logging_options: 'AuthorizationLoggingOptions' = proto.Field( + proto.MESSAGE, + number=217861624, + optional=True, + message='AuthorizationLoggingOptions', + ) + log_name: str = proto.Field( + proto.STRING, + number=402913958, + optional=True, + ) + + +class LogConfigCounterOptions(proto.Message): + r"""This is deprecated and has no effect. Do not use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + custom_fields (MutableSequence[google.cloud.compute_v1.types.LogConfigCounterOptionsCustomField]): + This is deprecated and has no effect. Do not + use. + field (str): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_field``. + metric (str): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_metric``. + """ + + custom_fields: MutableSequence['LogConfigCounterOptionsCustomField'] = proto.RepeatedField( + proto.MESSAGE, + number=249651015, + message='LogConfigCounterOptionsCustomField', + ) + field: str = proto.Field( + proto.STRING, + number=97427706, + optional=True, + ) + metric: str = proto.Field( + proto.STRING, + number=533067184, + optional=True, + ) + + +class LogConfigCounterOptionsCustomField(proto.Message): + r"""This is deprecated and has no effect. Do not use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_name``. + value (str): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_value``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=111972721, + optional=True, + ) + + +class LogConfigDataAccessOptions(proto.Message): + r"""This is deprecated and has no effect. Do not use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + log_mode (str): + This is deprecated and has no effect. Do not + use. Check the LogMode enum for the list of + possible values. + + This field is a member of `oneof`_ ``_log_mode``. + """ + class LogMode(proto.Enum): + r"""This is deprecated and has no effect. Do not use. + + Values: + UNDEFINED_LOG_MODE (0): + A value indicating that the enum field is not + set. + LOG_FAIL_CLOSED (360469778): + This is deprecated and has no effect. Do not + use. + LOG_MODE_UNSPECIFIED (88160822): + This is deprecated and has no effect. Do not + use. + """ + UNDEFINED_LOG_MODE = 0 + LOG_FAIL_CLOSED = 360469778 + LOG_MODE_UNSPECIFIED = 88160822 + + log_mode: str = proto.Field( + proto.STRING, + number=402897342, + optional=True, + ) + + +class MachineImage(proto.Message): + r"""Represents a machine image resource. A machine image is a + Compute Engine resource that stores all the configuration, + metadata, permissions, and data from one or more disks required + to create a Virtual machine (VM) instance. For more information, + see Machine images. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] The creation timestamp for this machine image + in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. 
+ guest_flush (bool): + [Input Only] Whether to attempt an application consistent + machine image by informing the OS to prepare for the + snapshot process. + + This field is a member of `oneof`_ ``_guest_flush``. + id (int): + [Output Only] A unique identifier for this machine image. + The server defines this identifier. + + This field is a member of `oneof`_ ``_id``. + instance_properties (google.cloud.compute_v1.types.InstanceProperties): + [Output Only] Properties of source instance + + This field is a member of `oneof`_ ``_instance_properties``. + kind (str): + [Output Only] The resource type, which is always + compute#machineImage for machine image. + + This field is a member of `oneof`_ ``_kind``. + machine_image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + Encrypts the machine image using a + customer-supplied encryption key. After you + encrypt a machine image using a + customer-supplied key, you must provide the same + key if you use the machine image later. For + example, you must provide the encryption key + when you create an instance from the encrypted + machine image in a future request. + Customer-supplied encryption keys do not protect + access to metadata of the machine image. If you + do not provide an encryption key when creating + the machine image, then the machine image will + be encrypted using an automatically generated + key and you do not need to provide a key to use + the machine image later. + + This field is a member of `oneof`_ ``_machine_image_encryption_key``. + name (str): + Name of the resource; provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + saved_disks (MutableSequence[google.cloud.compute_v1.types.SavedDisk]): + An array of Machine Image specific properties + for disks attached to the source instance + self_link (str): + [Output Only] The URL for this machine image. The server + defines this URL. + + This field is a member of `oneof`_ ``_self_link``. + source_disk_encryption_keys (MutableSequence[google.cloud.compute_v1.types.SourceDiskEncryptionKey]): + [Input Only] The customer-supplied encryption key of the + disks attached to the source instance. Required if the + source disk is protected by a customer-supplied encryption + key. + source_instance (str): + The source instance used to create the + machine image. You can provide this as a partial + or full URL to the resource. For example, the + following are valid values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /instances/instance - + projects/project/zones/zone/instances/instance + + This field is a member of `oneof`_ ``_source_instance``. + source_instance_properties (google.cloud.compute_v1.types.SourceInstanceProperties): + [Output Only] DEPRECATED: Please use instance_properties + instead for source instance related properties. New + properties will not be added to this field. + + This field is a member of `oneof`_ ``_source_instance_properties``. + status (str): + [Output Only] The status of the machine image. One of the + following values: INVALID, CREATING, READY, DELETING, and + UPLOADING. 
Check the Status enum for the list of possible + values. + + This field is a member of `oneof`_ ``_status``. + storage_locations (MutableSequence[str]): + The regional or multi-regional Cloud Storage + bucket location where the machine image is + stored. + total_storage_bytes (int): + [Output Only] Total size of the storage used by the machine + image. + + This field is a member of `oneof`_ ``_total_storage_bytes``. + """ + class Status(proto.Enum): + r"""[Output Only] The status of the machine image. One of the following + values: INVALID, CREATING, READY, DELETING, and UPLOADING. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + No description available. + DELETING (528602024): + No description available. + INVALID (530283991): + No description available. + READY (77848963): + No description available. + UPLOADING (267603489): + No description available. + """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + INVALID = 530283991 + READY = 77848963 + UPLOADING = 267603489 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + guest_flush: bool = proto.Field( + proto.BOOL, + number=385550813, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + instance_properties: 'InstanceProperties' = proto.Field( + proto.MESSAGE, + number=215355165, + optional=True, + message='InstanceProperties', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + machine_image_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=528089087, + optional=True, + message='CustomerEncryptionKey', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + 
) + saved_disks: MutableSequence['SavedDisk'] = proto.RepeatedField( + proto.MESSAGE, + number=397424318, + message='SavedDisk', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + source_disk_encryption_keys: MutableSequence['SourceDiskEncryptionKey'] = proto.RepeatedField( + proto.MESSAGE, + number=370408498, + message='SourceDiskEncryptionKey', + ) + source_instance: str = proto.Field( + proto.STRING, + number=396315705, + optional=True, + ) + source_instance_properties: 'SourceInstanceProperties' = proto.Field( + proto.MESSAGE, + number=475195641, + optional=True, + message='SourceInstanceProperties', + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + storage_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=328005274, + ) + total_storage_bytes: int = proto.Field( + proto.INT64, + number=81855468, + optional=True, + ) + + +class MachineImageList(proto.Message): + r"""A list of machine images. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.MachineImage]): + A list of MachineImage resources. + kind (str): + [Output Only] The resource type, which is always + compute#machineImagesListResponse for machine image lists. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['MachineImage'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='MachineImage', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class MachineType(proto.Message): + r"""Represents a Machine Type resource. You can use specific + machine types for your VM instances based on performance and + pricing requirements. For more information, read Machine Types. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerators (MutableSequence[google.cloud.compute_v1.types.Accelerators]): + [Output Only] A list of accelerator configurations assigned + to this machine type. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + deprecated (google.cloud.compute_v1.types.DeprecationStatus): + [Output Only] The deprecation status associated with this + machine type. Only applicable if the machine type is + unavailable. + + This field is a member of `oneof`_ ``_deprecated``. + description (str): + [Output Only] An optional textual description of the + resource. + + This field is a member of `oneof`_ ``_description``. 
+ guest_cpus (int): + [Output Only] The number of virtual CPUs that are available + to the instance. + + This field is a member of `oneof`_ ``_guest_cpus``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + image_space_gb (int): + [Deprecated] This property is deprecated and will never be + populated with any relevant values. + + This field is a member of `oneof`_ ``_image_space_gb``. + is_shared_cpu (bool): + [Output Only] Whether this machine type has a shared CPU. + See Shared-core machine types for more information. + + This field is a member of `oneof`_ ``_is_shared_cpu``. + kind (str): + [Output Only] The type of the resource. Always + compute#machineType for machine types. + + This field is a member of `oneof`_ ``_kind``. + maximum_persistent_disks (int): + [Output Only] Maximum persistent disks allowed. + + This field is a member of `oneof`_ ``_maximum_persistent_disks``. + maximum_persistent_disks_size_gb (int): + [Output Only] Maximum total persistent disks size (GB) + allowed. + + This field is a member of `oneof`_ ``_maximum_persistent_disks_size_gb``. + memory_mb (int): + [Output Only] The amount of physical memory available to the + instance, defined in MB. + + This field is a member of `oneof`_ ``_memory_mb``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + scratch_disks (MutableSequence[google.cloud.compute_v1.types.ScratchDisks]): + [Output Only] A list of extended scratch disks assigned to + the instance. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + zone (str): + [Output Only] The name of the zone where the machine type + resides, such as us-central1-a. + + This field is a member of `oneof`_ ``_zone``. 
+ """ + + accelerators: MutableSequence['Accelerators'] = proto.RepeatedField( + proto.MESSAGE, + number=269577064, + message='Accelerators', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + deprecated: 'DeprecationStatus' = proto.Field( + proto.MESSAGE, + number=515138995, + optional=True, + message='DeprecationStatus', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + guest_cpus: int = proto.Field( + proto.INT32, + number=393356754, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + image_space_gb: int = proto.Field( + proto.INT32, + number=75331864, + optional=True, + ) + is_shared_cpu: bool = proto.Field( + proto.BOOL, + number=521399555, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + maximum_persistent_disks: int = proto.Field( + proto.INT32, + number=496220941, + optional=True, + ) + maximum_persistent_disks_size_gb: int = proto.Field( + proto.INT64, + number=154274471, + optional=True, + ) + memory_mb: int = proto.Field( + proto.INT32, + number=116001171, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + scratch_disks: MutableSequence['ScratchDisks'] = proto.RepeatedField( + proto.MESSAGE, + number=480778481, + message='ScratchDisks', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class MachineTypeAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. 
+ items (MutableMapping[str, google.cloud.compute_v1.types.MachineTypesScopedList]): + A list of MachineTypesScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#machineTypeAggregatedList for aggregated lists of + machine types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'MachineTypesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='MachineTypesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class MachineTypeList(proto.Message): + r"""Contains a list of machine types. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.MachineType]): + A list of MachineType resources. + kind (str): + [Output Only] Type of resource. Always + compute#machineTypeList for lists of machine types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['MachineType'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='MachineType', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class MachineTypesScopedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + machine_types (MutableSequence[google.cloud.compute_v1.types.MachineType]): + [Output Only] A list of machine types contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] An informational warning that appears when the + machine types list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + machine_types: MutableSequence['MachineType'] = proto.RepeatedField( + proto.MESSAGE, + number=79720065, + message='MachineType', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ManagedInstance(proto.Message): + r"""A Managed Instance resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + current_action (str): + [Output Only] The current action that the managed instance + group has scheduled for the instance. Possible values: - + NONE The instance is running, and the managed instance group + does not have any scheduled actions for this instance. - + CREATING The managed instance group is creating this + instance. If the group fails to create this instance, it + will try again until it is successful. - + CREATING_WITHOUT_RETRIES The managed instance group is + attempting to create this instance only once. If the group + fails to create this instance, it does not try again and the + group's targetSize value is decreased instead. - RECREATING + The managed instance group is recreating this instance. - + DELETING The managed instance group is permanently deleting + this instance. - ABANDONING The managed instance group is + abandoning this instance. The instance will be removed from + the instance group and from any target pools that are + associated with this group. - RESTARTING The managed + instance group is restarting the instance. 
- REFRESHING The + managed instance group is applying configuration changes to + the instance without stopping it. For example, the group can + update the target pool list for an instance without stopping + that instance. - VERIFYING The managed instance group has + created the instance and it is in the process of being + verified. Check the CurrentAction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_current_action``. + id (int): + [Output only] The unique identifier for this resource. This + field is empty when instance does not exist. + + This field is a member of `oneof`_ ``_id``. + instance (str): + [Output Only] The URL of the instance. The URL can exist + even if the instance has not yet been created. + + This field is a member of `oneof`_ ``_instance``. + instance_health (MutableSequence[google.cloud.compute_v1.types.ManagedInstanceInstanceHealth]): + [Output Only] Health state of the instance per health-check. + instance_status (str): + [Output Only] The status of the instance. This field is + empty when the instance does not exist. Check the + InstanceStatus enum for the list of possible values. + + This field is a member of `oneof`_ ``_instance_status``. + last_attempt (google.cloud.compute_v1.types.ManagedInstanceLastAttempt): + [Output Only] Information about the last attempt to create + or delete the instance. + + This field is a member of `oneof`_ ``_last_attempt``. + preserved_state_from_config (google.cloud.compute_v1.types.PreservedState): + [Output Only] Preserved state applied from per-instance + config for this instance. + + This field is a member of `oneof`_ ``_preserved_state_from_config``. + preserved_state_from_policy (google.cloud.compute_v1.types.PreservedState): + [Output Only] Preserved state generated based on stateful + policy for this instance. + + This field is a member of `oneof`_ ``_preserved_state_from_policy``. 
+ version (google.cloud.compute_v1.types.ManagedInstanceVersion): + [Output Only] Intended version of this instance. + + This field is a member of `oneof`_ ``_version``. + """ + class CurrentAction(proto.Enum): + r"""[Output Only] The current action that the managed instance group has + scheduled for the instance. Possible values: - NONE The instance is + running, and the managed instance group does not have any scheduled + actions for this instance. - CREATING The managed instance group is + creating this instance. If the group fails to create this instance, + it will try again until it is successful. - CREATING_WITHOUT_RETRIES + The managed instance group is attempting to create this instance + only once. If the group fails to create this instance, it does not + try again and the group's targetSize value is decreased instead. - + RECREATING The managed instance group is recreating this instance. - + DELETING The managed instance group is permanently deleting this + instance. - ABANDONING The managed instance group is abandoning this + instance. The instance will be removed from the instance group and + from any target pools that are associated with this group. - + RESTARTING The managed instance group is restarting the instance. - + REFRESHING The managed instance group is applying configuration + changes to the instance without stopping it. For example, the group + can update the target pool list for an instance without stopping + that instance. - VERIFYING The managed instance group has created + the instance and it is in the process of being verified. Additional + supported values which may be not listed in the enum directly due to + technical reasons: STOPPING SUSPENDING + + Values: + UNDEFINED_CURRENT_ACTION (0): + A value indicating that the enum field is not + set. + ABANDONING (388244813): + The managed instance group is abandoning this + instance. 
The instance will be removed from the + instance group and from any target pools that + are associated with this group. + CREATING (455564985): + The managed instance group is creating this + instance. If the group fails to create this + instance, it will try again until it is + successful. + CREATING_WITHOUT_RETRIES (428843785): + The managed instance group is attempting to + create this instance only once. If the group + fails to create this instance, it does not try + again and the group's targetSize value is + decreased. + DELETING (528602024): + The managed instance group is permanently + deleting this instance. + NONE (2402104): + The managed instance group has not scheduled + any actions for this instance. + RECREATING (287278572): + The managed instance group is recreating this + instance. + REFRESHING (163266343): + The managed instance group is applying + configuration changes to the instance without + stopping it. For example, the group can update + the target pool list for an instance without + stopping that instance. + RESTARTING (320534387): + The managed instance group is restarting this + instance. + RESUMING (446856618): + The managed instance group is resuming this + instance. + STARTING (488820800): + The managed instance group is starting this + instance. + VERIFYING (16982185): + The managed instance group is verifying this + already created instance. Verification happens + every time the instance is (re)created or + restarted and consists of: 1. Waiting until + health check specified as part of this managed + instance group's autohealing policy reports + HEALTHY. Note: Applies only if autohealing + policy has a health check specified 2. Waiting + for addition verification steps performed as + post-instance creation (subject to future + extensions). 
+ """ + UNDEFINED_CURRENT_ACTION = 0 + ABANDONING = 388244813 + CREATING = 455564985 + CREATING_WITHOUT_RETRIES = 428843785 + DELETING = 528602024 + NONE = 2402104 + RECREATING = 287278572 + REFRESHING = 163266343 + RESTARTING = 320534387 + RESUMING = 446856618 + STARTING = 488820800 + VERIFYING = 16982185 + + class InstanceStatus(proto.Enum): + r"""[Output Only] The status of the instance. This field is empty when + the instance does not exist. Additional supported values which may + be not listed in the enum directly due to technical reasons: + STOPPING SUSPENDING + + Values: + UNDEFINED_INSTANCE_STATUS (0): + A value indicating that the enum field is not + set. + DEPROVISIONING (428935662): + The instance is halted and we are performing + tear down tasks like network deprogramming, + releasing quota, IP, tearing down disks etc. + PROVISIONING (290896621): + Resources are being allocated for the + instance. + REPAIRING (413483285): + The instance is in repair. + RUNNING (121282975): + The instance is running. + STAGING (431072283): + All required resources have been allocated + and the instance is being started. + STOPPED (444276141): + The instance has stopped successfully. + STOPPING (350791796): + No description available. + SUSPENDED (51223995): + The instance has suspended. + SUSPENDING (514206246): + No description available. + TERMINATED (250018339): + The instance has stopped (either by explicit + action or underlying failure). 
+ """ + UNDEFINED_INSTANCE_STATUS = 0 + DEPROVISIONING = 428935662 + PROVISIONING = 290896621 + REPAIRING = 413483285 + RUNNING = 121282975 + STAGING = 431072283 + STOPPED = 444276141 + STOPPING = 350791796 + SUSPENDED = 51223995 + SUSPENDING = 514206246 + TERMINATED = 250018339 + + current_action: str = proto.Field( + proto.STRING, + number=178475964, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + optional=True, + ) + instance_health: MutableSequence['ManagedInstanceInstanceHealth'] = proto.RepeatedField( + proto.MESSAGE, + number=382667078, + message='ManagedInstanceInstanceHealth', + ) + instance_status: str = proto.Field( + proto.STRING, + number=174577372, + optional=True, + ) + last_attempt: 'ManagedInstanceLastAttempt' = proto.Field( + proto.MESSAGE, + number=434771492, + optional=True, + message='ManagedInstanceLastAttempt', + ) + preserved_state_from_config: 'PreservedState' = proto.Field( + proto.MESSAGE, + number=98661858, + optional=True, + message='PreservedState', + ) + preserved_state_from_policy: 'PreservedState' = proto.Field( + proto.MESSAGE, + number=470783954, + optional=True, + message='PreservedState', + ) + version: 'ManagedInstanceVersion' = proto.Field( + proto.MESSAGE, + number=351608024, + optional=True, + message='ManagedInstanceVersion', + ) + + +class ManagedInstanceInstanceHealth(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + detailed_health_state (str): + [Output Only] The current detailed instance health state. + Check the DetailedHealthState enum for the list of possible + values. + + This field is a member of `oneof`_ ``_detailed_health_state``. + health_check (str): + [Output Only] The URL for the health check that verifies + whether the instance is healthy. 
+ + This field is a member of `oneof`_ ``_health_check``. + """ + class DetailedHealthState(proto.Enum): + r"""[Output Only] The current detailed instance health state. + + Values: + UNDEFINED_DETAILED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + DRAINING (480455402): + The instance is being drained. The existing + connections to the instance have time to + complete, but the new ones are being refused. + HEALTHY (439801213): + The instance is reachable i.e. a connection + to the application health checking endpoint can + be established, and conforms to the requirements + defined by the health check. + TIMEOUT (477813057): + The instance is unreachable i.e. a connection + to the application health checking endpoint + cannot be established, or the server does not + respond within the specified timeout. + UNHEALTHY (462118084): + The instance is reachable, but does not + conform to the requirements defined by the + health check. + UNKNOWN (433141802): + The health checking system is aware of the + instance but its health is not known at the + moment. + """ + UNDEFINED_DETAILED_HEALTH_STATE = 0 + DRAINING = 480455402 + HEALTHY = 439801213 + TIMEOUT = 477813057 + UNHEALTHY = 462118084 + UNKNOWN = 433141802 + + detailed_health_state: str = proto.Field( + proto.STRING, + number=510470173, + optional=True, + ) + health_check: str = proto.Field( + proto.STRING, + number=308876645, + optional=True, + ) + + +class ManagedInstanceLastAttempt(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + errors (google.cloud.compute_v1.types.Errors): + [Output Only] Encountered errors during the last attempt to + create or delete the instance. + + This field is a member of `oneof`_ ``_errors``. 
+ """ + + errors: 'Errors' = proto.Field( + proto.MESSAGE, + number=315977579, + optional=True, + message='Errors', + ) + + +class ManagedInstanceVersion(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_template (str): + [Output Only] The intended template of the instance. This + field is empty when current_action is one of { DELETING, + ABANDONING }. + + This field is a member of `oneof`_ ``_instance_template``. + name (str): + [Output Only] Name of the version. + + This field is a member of `oneof`_ ``_name``. + """ + + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + +class Metadata(proto.Message): + r"""A metadata key/value entry. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fingerprint (str): + Specifies a fingerprint for this request, + which is essentially a hash of the metadata's + contents and used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update metadata. You must always provide an + up-to-date fingerprint hash in order to update + or change metadata, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make a get() request to + retrieve the resource. + + This field is a member of `oneof`_ ``_fingerprint``. + items (MutableSequence[google.cloud.compute_v1.types.Items]): + Array of key/value pairs. The total size of + all keys and values must be less than 512 KB. + kind (str): + [Output Only] Type of the resource. Always compute#metadata + for metadata. + + This field is a member of `oneof`_ ``_kind``. 
+ """ + + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + items: MutableSequence['Items'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Items', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + + +class MetadataFilter(proto.Message): + r"""Opaque filter criteria used by load balancers to restrict routing + configuration to a limited set of load balancing proxies. Proxies + and sidecars involved in load balancing would typically present + metadata to the load balancers that need to match criteria specified + here. If a match takes place, the relevant configuration is made + available to those proxies. For each metadataFilter in this list, if + its filterMatchCriteria is set to MATCH_ANY, at least one of the + filterLabels must match the corresponding label provided in the + metadata. If its filterMatchCriteria is set to MATCH_ALL, then all + of its filterLabels must match with corresponding labels provided in + the metadata. An example for using metadataFilters would be: if load + balancing involves Envoys, they receive routing configuration when + values in metadataFilters match values supplied in of their XDS + requests to loadbalancers. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter_labels (MutableSequence[google.cloud.compute_v1.types.MetadataFilterLabelMatch]): + The list of label value pairs that must match + labels in the provided metadata based on + filterMatchCriteria This list must not be empty + and can have at the most 64 entries. + filter_match_criteria (str): + Specifies how individual filter label matches within the + list of filterLabels and contributes toward the overall + metadataFilter match. Supported values are: - MATCH_ANY: at + least one of the filterLabels must have a matching label in + the provided metadata. 
- MATCH_ALL: all filterLabels must + have matching labels in the provided metadata. Check the + FilterMatchCriteria enum for the list of possible values. + + This field is a member of `oneof`_ ``_filter_match_criteria``. + """ + class FilterMatchCriteria(proto.Enum): + r"""Specifies how individual filter label matches within the list of + filterLabels and contributes toward the overall metadataFilter + match. Supported values are: - MATCH_ANY: at least one of the + filterLabels must have a matching label in the provided metadata. - + MATCH_ALL: all filterLabels must have matching labels in the + provided metadata. + + Values: + UNDEFINED_FILTER_MATCH_CRITERIA (0): + A value indicating that the enum field is not + set. + MATCH_ALL (180663271): + Specifies that all filterLabels must match + for the metadataFilter to be considered a match. + MATCH_ANY (180663346): + Specifies that any filterLabel must match for + the metadataFilter to be considered a match. + NOT_SET (163646646): + Indicates that the match criteria was not + set. A metadataFilter must never be created with + this value. + """ + UNDEFINED_FILTER_MATCH_CRITERIA = 0 + MATCH_ALL = 180663271 + MATCH_ANY = 180663346 + NOT_SET = 163646646 + + filter_labels: MutableSequence['MetadataFilterLabelMatch'] = proto.RepeatedField( + proto.MESSAGE, + number=307903142, + message='MetadataFilterLabelMatch', + ) + filter_match_criteria: str = proto.Field( + proto.STRING, + number=239970368, + optional=True, + ) + + +class MetadataFilterLabelMatch(proto.Message): + r"""MetadataFilter label name value pairs that are expected to + match corresponding labels presented as metadata to the load + balancer. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of metadata label. The name can have a + maximum length of 1024 characters and must be at + least 1 character long. + + This field is a member of `oneof`_ ``_name``. 
+ value (str): + The value of the label must match the + specified value. value can have a maximum length + of 1024 characters. + + This field is a member of `oneof`_ ``_value``. + """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=111972721, + optional=True, + ) + + +class MoveAddressRequest(proto.Message): + r"""A request message for Addresses.Move. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address (str): + Name of the address resource to move. + project (str): + Source project ID which the Address is moved + from. + region (str): + Name of the region for this request. + region_addresses_move_request_resource (google.cloud.compute_v1.types.RegionAddressesMoveRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + address: str = proto.Field( + proto.STRING, + number=462920692, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_addresses_move_request_resource: 'RegionAddressesMoveRequest' = proto.Field( + proto.MESSAGE, + number=409081924, + message='RegionAddressesMoveRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class MoveDiskProjectRequest(proto.Message): + r"""A request message for Projects.MoveDisk. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_move_request_resource (google.cloud.compute_v1.types.DiskMoveRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + disk_move_request_resource: 'DiskMoveRequest' = proto.Field( + proto.MESSAGE, + number=313008458, + message='DiskMoveRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class MoveFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.Move. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + parent_id (str): + The new parent of the firewall policy. The ID can be either + be "folders/[FOLDER_ID]" if the parent is a folder or + "organizations/[ORGANIZATION_ID]" if the parent is an + organization. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + parent_id: str = proto.Field( + proto.STRING, + number=459714768, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class MoveGlobalAddressRequest(proto.Message): + r"""A request message for GlobalAddresses.Move. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + address (str): + Name of the address resource to move. + global_addresses_move_request_resource (google.cloud.compute_v1.types.GlobalAddressesMoveRequest): + The body resource for this request + project (str): + Source project ID which the Address is moved + from. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + address: str = proto.Field( + proto.STRING, + number=462920692, + ) + global_addresses_move_request_resource: 'GlobalAddressesMoveRequest' = proto.Field( + proto.MESSAGE, + number=302807283, + message='GlobalAddressesMoveRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class MoveInstanceProjectRequest(proto.Message): + r"""A request message for Projects.MoveInstance. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_move_request_resource (google.cloud.compute_v1.types.InstanceMoveRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_move_request_resource: 'InstanceMoveRequest' = proto.Field( + proto.MESSAGE, + number=311664194, + message='InstanceMoveRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class NamedPort(proto.Message): + r"""The named port. For example: <"http", 80>. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name for this named port. The name must + be 1-63 characters long, and comply with + RFC1035. + + This field is a member of `oneof`_ ``_name``. + port (int): + The port number, which can be a value between + 1 and 65535. + + This field is a member of `oneof`_ ``_port``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + + +class Network(proto.Message): + r"""Represents a VPC Network resource. Networks connect resources + to each other and to the internet. For more information, read + Virtual Private Cloud (VPC) Network. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + I_pv4_range (str): + Deprecated in favor of subnet mode networks. + The range of internal addresses that are legal + on this network. This range is a CIDR + specification, for example: 192.168.0.0/16. + Provided by the client when the network is + created. + + This field is a member of `oneof`_ ``_I_pv4_range``. + auto_create_subnetworks (bool): + Must be set to create a VPC network. If not + set, a legacy network is created. When set to + true, the VPC network is created in auto mode. + When set to false, the VPC network is created in + custom mode. An auto mode VPC network starts + with one subnet per region. Each subnet has a + predetermined range as described in Auto mode + VPC network IP ranges. For custom mode VPC + networks, you can add subnets using the + subnetworks insert method. + + This field is a member of `oneof`_ ``_auto_create_subnetworks``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this field when you create the resource. + + This field is a member of `oneof`_ ``_description``. + enable_ula_internal_ipv6 (bool): + Enable ULA internal ipv6 on this network. + Enabling this feature will assign a /48 from + google defined ULA prefix fd20::/20. . + + This field is a member of `oneof`_ ``_enable_ula_internal_ipv6``. 
+ firewall_policy (str): + [Output Only] URL of the firewall policy the network is + associated with. + + This field is a member of `oneof`_ ``_firewall_policy``. + gateway_i_pv4 (str): + [Output Only] The gateway address for default routing out of + the network, selected by Google Cloud. + + This field is a member of `oneof`_ ``_gateway_i_pv4``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + internal_ipv6_range (str): + When enabling ula internal ipv6, caller + optionally can specify the /48 range they want + from the google defined ULA prefix fd20::/20. + The input must be a valid /48 ULA IPv6 address + and must be within the fd20::/20. Operation will + fail if the speficied /48 is already in used by + another resource. If the field is not speficied, + then a /48 range will be randomly allocated from + fd20::/20 and returned via this field. . + + This field is a member of `oneof`_ ``_internal_ipv6_range``. + kind (str): + [Output Only] Type of the resource. Always compute#network + for networks. + + This field is a member of `oneof`_ ``_kind``. + mtu (int): + Maximum Transmission Unit in bytes. The + minimum value for this field is 1300 and the + maximum value is 8896. The suggested value is + 1500, which is the default MTU used on the + Internet, or 8896 if you want to use Jumbo + frames. If unspecified, the value defaults to + 1460. + + This field is a member of `oneof`_ ``_mtu``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?``. The first character must be + a lowercase letter, and all following characters (except for + the last character) must be a dash, lowercase letter, or + digit. 
The last character must be a lowercase letter or + digit. + + This field is a member of `oneof`_ ``_name``. + network_firewall_policy_enforcement_order (str): + The network firewall policy enforcement order. Can be either + AFTER_CLASSIC_FIREWALL or BEFORE_CLASSIC_FIREWALL. Defaults + to AFTER_CLASSIC_FIREWALL if the field is not specified. + Check the NetworkFirewallPolicyEnforcementOrder enum for the + list of possible values. + + This field is a member of `oneof`_ ``_network_firewall_policy_enforcement_order``. + peerings (MutableSequence[google.cloud.compute_v1.types.NetworkPeering]): + [Output Only] A list of network peerings for the resource. + routing_config (google.cloud.compute_v1.types.NetworkRoutingConfig): + The network-level routing configuration for + this network. Used by Cloud Router to determine + what type of network-wide routing behavior to + enforce. + + This field is a member of `oneof`_ ``_routing_config``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + [Output Only] Server-defined URL for this resource with the + resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. + subnetworks (MutableSequence[str]): + [Output Only] Server-defined fully-qualified URLs for all + subnetworks in this VPC network. + """ + class NetworkFirewallPolicyEnforcementOrder(proto.Enum): + r"""The network firewall policy enforcement order. Can be either + AFTER_CLASSIC_FIREWALL or BEFORE_CLASSIC_FIREWALL. Defaults to + AFTER_CLASSIC_FIREWALL if the field is not specified. + + Values: + UNDEFINED_NETWORK_FIREWALL_POLICY_ENFORCEMENT_ORDER (0): + A value indicating that the enum field is not + set. + AFTER_CLASSIC_FIREWALL (154582608): + No description available. + BEFORE_CLASSIC_FIREWALL (338458349): + No description available. 
+ """ + UNDEFINED_NETWORK_FIREWALL_POLICY_ENFORCEMENT_ORDER = 0 + AFTER_CLASSIC_FIREWALL = 154582608 + BEFORE_CLASSIC_FIREWALL = 338458349 + + I_pv4_range: str = proto.Field( + proto.STRING, + number=59234358, + optional=True, + ) + auto_create_subnetworks: bool = proto.Field( + proto.BOOL, + number=256156690, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + enable_ula_internal_ipv6: bool = proto.Field( + proto.BOOL, + number=423757720, + optional=True, + ) + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + optional=True, + ) + gateway_i_pv4: str = proto.Field( + proto.STRING, + number=178678877, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + internal_ipv6_range: str = proto.Field( + proto.STRING, + number=277456807, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + mtu: int = proto.Field( + proto.INT32, + number=108462, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network_firewall_policy_enforcement_order: str = proto.Field( + proto.STRING, + number=6504784, + optional=True, + ) + peerings: MutableSequence['NetworkPeering'] = proto.RepeatedField( + proto.MESSAGE, + number=69883187, + message='NetworkPeering', + ) + routing_config: 'NetworkRoutingConfig' = proto.Field( + proto.MESSAGE, + number=523556059, + optional=True, + message='NetworkRoutingConfig', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + subnetworks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=415853125, + ) + + +class NetworkAttachment(proto.Message): + r"""NetworkAttachments A network 
attachment resource ... + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + connection_endpoints (MutableSequence[google.cloud.compute_v1.types.NetworkAttachmentConnectedEndpoint]): + [Output Only] An array of connections for all the producers + connected to this network attachment. + connection_preference (str): + Check the ConnectionPreference enum for the + list of possible values. + + This field is a member of `oneof`_ ``_connection_preference``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. An up-to-date + fingerprint must be provided in order to patch. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource type. + The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. 
+ network (str): + [Output Only] The URL of the network which the Network + Attachment belongs to. Practically it is inferred by + fetching the network of the first subnetwork associated. + Because it is required that all the subnetworks must be from + the same network, it is assured that the Network Attachment + belongs to the same network as all the subnetworks. + + This field is a member of `oneof`_ ``_network``. + producer_accept_lists (MutableSequence[str]): + Projects that are allowed to connect to this + network attachment. The project can be specified + using its id or number. + producer_reject_lists (MutableSequence[str]): + Projects that are not allowed to connect to + this network attachment. The project can be + specified using its id or number. + region (str): + [Output Only] URL of the region where the network attachment + resides. This field applies only to the region resource. You + must specify this field as part of the HTTP request URL. It + is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + [Output Only] Server-defined URL for this resource's + resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. + subnetworks (MutableSequence[str]): + An array of URLs where each entry is the URL + of a subnet provided by the service consumer to + use for endpoints in the producers that connect + to this network attachment. + """ + class ConnectionPreference(proto.Enum): + r""" + + Values: + UNDEFINED_CONNECTION_PREFERENCE (0): + A value indicating that the enum field is not + set. + ACCEPT_AUTOMATIC (75250580): + No description available. + ACCEPT_MANUAL (373061341): + No description available. + INVALID (530283991): + No description available. 
+ """ + UNDEFINED_CONNECTION_PREFERENCE = 0 + ACCEPT_AUTOMATIC = 75250580 + ACCEPT_MANUAL = 373061341 + INVALID = 530283991 + + connection_endpoints: MutableSequence['NetworkAttachmentConnectedEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=326078813, + message='NetworkAttachmentConnectedEndpoint', + ) + connection_preference: str = proto.Field( + proto.STRING, + number=285818076, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + producer_accept_lists: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=202804523, + ) + producer_reject_lists: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4112002, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + subnetworks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=415853125, + ) + + +class NetworkAttachmentAggregatedList(proto.Message): + r"""Contains a list of NetworkAttachmentsScopedList. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. 
+ items (MutableMapping[str, google.cloud.compute_v1.types.NetworkAttachmentsScopedList]): + A list of NetworkAttachmentsScopedList + resources. + kind (str): + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'NetworkAttachmentsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='NetworkAttachmentsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkAttachmentConnectedEndpoint(proto.Message): + r"""[Output Only] A connection connected to this network attachment. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_address (str): + The IPv4 address assigned to the producer + instance network interface. 
This value will be a + range in case of Serverless. + + This field is a member of `oneof`_ ``_ip_address``. + project_id_or_num (str): + The project id or number of the interface to + which the IP was assigned. + + This field is a member of `oneof`_ ``_project_id_or_num``. + secondary_ip_cidr_ranges (MutableSequence[str]): + Alias IP ranges from the same subnetwork. + status (str): + The status of a connected endpoint to this + network attachment. Check the Status enum for + the list of possible values. + + This field is a member of `oneof`_ ``_status``. + subnetwork (str): + The subnetwork used to assign the IP to the + producer instance network interface. + + This field is a member of `oneof`_ ``_subnetwork``. + """ + class Status(proto.Enum): + r"""The status of a connected endpoint to this network + attachment. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + ACCEPTED (246714279): + The consumer allows traffic from the producer + to reach its VPC. + CLOSED (380163436): + The consumer network attachment no longer + exists. + NEEDS_ATTENTION (344491452): + The consumer needs to take further action + before traffic can be served. + PENDING (35394935): + The consumer neither allows nor prohibits + traffic from the producer to reach its VPC. + REJECTED (174130302): + The consumer prohibits traffic from the + producer to reach its VPC. + STATUS_UNSPECIFIED (42133066): + No description available. 
+ """ + UNDEFINED_STATUS = 0 + ACCEPTED = 246714279 + CLOSED = 380163436 + NEEDS_ATTENTION = 344491452 + PENDING = 35394935 + REJECTED = 174130302 + STATUS_UNSPECIFIED = 42133066 + + ip_address: str = proto.Field( + proto.STRING, + number=406272220, + optional=True, + ) + project_id_or_num: str = proto.Field( + proto.STRING, + number=349783336, + optional=True, + ) + secondary_ip_cidr_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=117184788, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + optional=True, + ) + + +class NetworkAttachmentList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.NetworkAttachment]): + A list of NetworkAttachment resources. + kind (str): + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['NetworkAttachment'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='NetworkAttachment', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkAttachmentsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_attachments (MutableSequence[google.cloud.compute_v1.types.NetworkAttachment]): + A list of NetworkAttachments contained in + this scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of network attachments when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + network_attachments: MutableSequence['NetworkAttachment'] = proto.RepeatedField( + proto.MESSAGE, + number=521514783, + message='NetworkAttachment', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkEdgeSecurityService(proto.Message): + r"""Represents a Google Cloud Armor network edge security service + resource. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. 
+ Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a + NetworkEdgeSecurityService. An up-to-date + fingerprint must be provided in order to update + the NetworkEdgeSecurityService, otherwise the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve a + NetworkEdgeSecurityService. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output only] Type of the resource. Always + compute#networkEdgeSecurityService for + NetworkEdgeSecurityServices + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + region (str): + [Output Only] URL of the region where the resource resides. + You must specify this field as part of the HTTP request URL. + It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. + security_policy (str): + The resource URL for the network edge + security service associated with this network + edge security service. + + This field is a member of `oneof`_ ``_security_policy``. 
+ self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + [Output Only] Server-defined URL for this resource with the + resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. + """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + + +class NetworkEdgeSecurityServiceAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.NetworkEdgeSecurityServicesScopedList]): + A list of + NetworkEdgeSecurityServicesScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#networkEdgeSecurityServiceAggregatedList for lists + of Network Edge Security Services. + + This field is a member of `oneof`_ ``_kind``. 
+ next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'NetworkEdgeSecurityServicesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='NetworkEdgeSecurityServicesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkEdgeSecurityServicesScopedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_edge_security_services (MutableSequence[google.cloud.compute_v1.types.NetworkEdgeSecurityService]): + A list of NetworkEdgeSecurityServices + contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of security policies when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + network_edge_security_services: MutableSequence['NetworkEdgeSecurityService'] = proto.RepeatedField( + proto.MESSAGE, + number=35530156, + message='NetworkEdgeSecurityService', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkEndpoint(proto.Message): + r"""The network endpoint. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + annotations (MutableMapping[str, str]): + Metadata defined as annotations on the + network endpoint. + fqdn (str): + Optional fully qualified domain name of network endpoint. + This can only be specified when + NetworkEndpointGroup.network_endpoint_type is + NON_GCP_FQDN_PORT. + + This field is a member of `oneof`_ ``_fqdn``. + instance (str): + The name for a specific VM instance that the IP address + belongs to. This is required for network endpoints of type + GCE_VM_IP_PORT. The instance must be in the same zone of + network endpoint group. The name must be 1-63 characters + long, and comply with RFC1035. + + This field is a member of `oneof`_ ``_instance``. + ip_address (str): + Optional IPv4 address of network endpoint. + The IP address must belong to a VM in Compute + Engine (either the primary IP or as part of an + aliased IP range). 
If the IP address is not + specified, then the primary IP address for the + VM instance in the network that the network + endpoint group belongs to will be used. + + This field is a member of `oneof`_ ``_ip_address``. + port (int): + Optional port number of network endpoint. If + not specified, the defaultPort for the network + endpoint group will be used. + + This field is a member of `oneof`_ ``_port``. + """ + + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=112032548, + ) + fqdn: str = proto.Field( + proto.STRING, + number=3150485, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=406272220, + optional=True, + ) + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + + +class NetworkEndpointGroup(proto.Message): + r"""Represents a collection of network endpoints. A network + endpoint group (NEG) defines how a set of endpoints should be + reached, whether they are reachable, and where they are located. + For more information about using NEGs, see Setting up external + HTTP(S) Load Balancing with internet NEGs, Setting up zonal + NEGs, or Setting up external HTTP(S) Load Balancing with + serverless NEGs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + annotations (MutableMapping[str, str]): + Metadata defined as annotations on the + network endpoint group. + app_engine (google.cloud.compute_v1.types.NetworkEndpointGroupAppEngine): + Only valid when networkEndpointType is + "SERVERLESS". Only one of cloudRun, appEngine or + cloudFunction may be set. + + This field is a member of `oneof`_ ``_app_engine``. + cloud_function (google.cloud.compute_v1.types.NetworkEndpointGroupCloudFunction): + Only valid when networkEndpointType is + "SERVERLESS". 
Only one of cloudRun, appEngine or + cloudFunction may be set. + + This field is a member of `oneof`_ ``_cloud_function``. + cloud_run (google.cloud.compute_v1.types.NetworkEndpointGroupCloudRun): + Only valid when networkEndpointType is + "SERVERLESS". Only one of cloudRun, appEngine or + cloudFunction may be set. + + This field is a member of `oneof`_ ``_cloud_run``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + default_port (int): + The default port used if the port number is + not specified in the network endpoint. + + This field is a member of `oneof`_ ``_default_port``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#networkEndpointGroup for network endpoint group. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource; provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network (str): + The URL of the network to which all network + endpoints in the NEG belong. Uses "default" + project network if unspecified. + + This field is a member of `oneof`_ ``_network``. 
+ network_endpoint_type (str): + Type of network endpoints in this network endpoint group. + Can be one of GCE_VM_IP, GCE_VM_IP_PORT, + NON_GCP_PRIVATE_IP_PORT, INTERNET_FQDN_PORT, + INTERNET_IP_PORT, SERVERLESS, PRIVATE_SERVICE_CONNECT. Check + the NetworkEndpointType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_network_endpoint_type``. + psc_data (google.cloud.compute_v1.types.NetworkEndpointGroupPscData): + + This field is a member of `oneof`_ ``_psc_data``. + psc_target_service (str): + The target service url used to set up private + service connection to a Google API or a PSC + Producer Service Attachment. An example value + is: "asia-northeast3-cloudkms.googleapis.com". + + This field is a member of `oneof`_ ``_psc_target_service``. + region (str): + [Output Only] The URL of the region where the network + endpoint group is located. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + size (int): + [Output only] Number of network endpoints in the network + endpoint group. + + This field is a member of `oneof`_ ``_size``. + subnetwork (str): + Optional URL of the subnetwork to which all + network endpoints in the NEG belong. + + This field is a member of `oneof`_ ``_subnetwork``. + zone (str): + [Output Only] The URL of the zone where the network endpoint + group is located. + + This field is a member of `oneof`_ ``_zone``. + """ + class NetworkEndpointType(proto.Enum): + r"""Type of network endpoints in this network endpoint group. Can be one + of GCE_VM_IP, GCE_VM_IP_PORT, NON_GCP_PRIVATE_IP_PORT, + INTERNET_FQDN_PORT, INTERNET_IP_PORT, SERVERLESS, + PRIVATE_SERVICE_CONNECT. + + Values: + UNDEFINED_NETWORK_ENDPOINT_TYPE (0): + A value indicating that the enum field is not + set. + GCE_VM_IP (401880793): + The network endpoint is represented by an IP + address. 
+ GCE_VM_IP_PORT (501838375): + The network endpoint is represented by IP + address and port pair. + INTERNET_FQDN_PORT (404154477): + The network endpoint is represented by fully + qualified domain name and port. + INTERNET_IP_PORT (477719963): + The network endpoint is represented by an + internet IP address and port. + NON_GCP_PRIVATE_IP_PORT (336447968): + The network endpoint is represented by an IP + address and port. The endpoint belongs to a VM + or pod running in a customer's on-premises. + PRIVATE_SERVICE_CONNECT (48134724): + The network endpoint is either public Google + APIs or services exposed by other GCP Project + with a Service Attachment. The connection is set + up by private service connect + SERVERLESS (270492508): + The network endpoint is handled by specified + serverless infrastructure. + """ + UNDEFINED_NETWORK_ENDPOINT_TYPE = 0 + GCE_VM_IP = 401880793 + GCE_VM_IP_PORT = 501838375 + INTERNET_FQDN_PORT = 404154477 + INTERNET_IP_PORT = 477719963 + NON_GCP_PRIVATE_IP_PORT = 336447968 + PRIVATE_SERVICE_CONNECT = 48134724 + SERVERLESS = 270492508 + + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=112032548, + ) + app_engine: 'NetworkEndpointGroupAppEngine' = proto.Field( + proto.MESSAGE, + number=340788768, + optional=True, + message='NetworkEndpointGroupAppEngine', + ) + cloud_function: 'NetworkEndpointGroupCloudFunction' = proto.Field( + proto.MESSAGE, + number=519893666, + optional=True, + message='NetworkEndpointGroupCloudFunction', + ) + cloud_run: 'NetworkEndpointGroupCloudRun' = proto.Field( + proto.MESSAGE, + number=111060353, + optional=True, + message='NetworkEndpointGroupCloudRun', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + default_port: int = proto.Field( + proto.INT32, + number=423377855, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + 
proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + network_endpoint_type: str = proto.Field( + proto.STRING, + number=118301523, + optional=True, + ) + psc_data: 'NetworkEndpointGroupPscData' = proto.Field( + proto.MESSAGE, + number=71937481, + optional=True, + message='NetworkEndpointGroupPscData', + ) + psc_target_service: str = proto.Field( + proto.STRING, + number=269132134, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + size: int = proto.Field( + proto.INT32, + number=3530753, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class NetworkEndpointGroupAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.NetworkEndpointGroupsScopedList]): + A list of NetworkEndpointGroupsScopedList + resources. + kind (str): + [Output Only] The resource type, which is always + compute#networkEndpointGroupAggregatedList for aggregated + lists of network endpoint groups. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'NetworkEndpointGroupsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='NetworkEndpointGroupsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkEndpointGroupAppEngine(proto.Message): + r"""Configuration for an App Engine network endpoint group (NEG). + The service is optional, may be provided explicitly or in the + URL mask. The version is optional and can only be provided + explicitly or in the URL mask when service is present. Note: App + Engine service must be in the same project and located in the + same region as the Serverless NEG. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + service (str): + Optional serving service. The service name is + case-sensitive and must be 1-63 characters long. + Example value: "default", "my-service". + + This field is a member of `oneof`_ ``_service``. + url_mask (str): + A template to parse service and version + fields from a request URL. URL mask allows for + routing to multiple App Engine services without + having to create multiple Network Endpoint + Groups and backend services. For example, the + request URLs "foo1-dot-appname.appspot.com/v1" + and "foo1-dot-appname.appspot.com/v2" can be + backed by the same Serverless NEG with URL mask + "-dot-appname.appspot.com/". + The URL mask will parse them to { service = + "foo1", version = "v1" } and { service = "foo1", + version = "v2" } respectively. + + This field is a member of `oneof`_ ``_url_mask``. + version (str): + Optional serving version. The version name is + case-sensitive and must be 1-100 characters + long. Example value: "v1", "v2". + + This field is a member of `oneof`_ ``_version``. + """ + + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + url_mask: str = proto.Field( + proto.STRING, + number=103352252, + optional=True, + ) + version: str = proto.Field( + proto.STRING, + number=351608024, + optional=True, + ) + + +class NetworkEndpointGroupCloudFunction(proto.Message): + r"""Configuration for a Cloud Function network endpoint group + (NEG). The function must be provided explicitly or in the URL + mask. Note: Cloud Function must be in the same project and + located in the same region as the Serverless NEG. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + function (str): + A user-defined name of the Cloud Function. + The function name is case-sensitive and must be + 1-63 characters long. 
Example value: "func1". + + This field is a member of `oneof`_ ``_function``. + url_mask (str): + A template to parse function field from a + request URL. URL mask allows for routing to + multiple Cloud Functions without having to + create multiple Network Endpoint Groups and + backend services. For example, request URLs " + mydomain.com/function1" and + "mydomain.com/function2" can be backed by the + same Serverless NEG with URL mask "/". + The URL mask will parse them to { function = + "function1" } and { function = "function2" } + respectively. + + This field is a member of `oneof`_ ``_url_mask``. + """ + + function: str = proto.Field( + proto.STRING, + number=307196888, + optional=True, + ) + url_mask: str = proto.Field( + proto.STRING, + number=103352252, + optional=True, + ) + + +class NetworkEndpointGroupCloudRun(proto.Message): + r"""Configuration for a Cloud Run network endpoint group (NEG). + The service must be provided explicitly or in the URL mask. The + tag is optional, may be provided explicitly or in the URL mask. + Note: Cloud Run service must be in the same project and located + in the same region as the Serverless NEG. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + service (str): + Cloud Run service is the main resource of + Cloud Run. The service must be 1-63 characters + long, and comply with RFC1035. Example value: + "run-service". + + This field is a member of `oneof`_ ``_service``. + tag (str): + Optional Cloud Run tag represents the + "named-revision" to provide additional + fine-grained traffic routing information. The + tag must be 1-63 characters long, and comply + with RFC1035. Example value: "revision-0010". + + This field is a member of `oneof`_ ``_tag``. + url_mask (str): + A template to parse and + fields from a request URL. 
URL mask allows for + routing to multiple Run services without having + to create multiple network endpoint groups and + backend services. For example, request URLs + "foo1.domain.com/bar1" and + "foo1.domain.com/bar2" can be backed by the same + Serverless Network Endpoint Group (NEG) with URL + mask ".domain.com/". The URL mask + will parse them to { service="bar1", tag="foo1" + } and { service="bar2", tag="foo2" } + respectively. + + This field is a member of `oneof`_ ``_url_mask``. + """ + + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + tag: str = proto.Field( + proto.STRING, + number=114586, + optional=True, + ) + url_mask: str = proto.Field( + proto.STRING, + number=103352252, + optional=True, + ) + + +class NetworkEndpointGroupList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.NetworkEndpointGroup]): + A list of NetworkEndpointGroup resources. + kind (str): + [Output Only] The resource type, which is always + compute#networkEndpointGroupList for network endpoint group + lists. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['NetworkEndpointGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='NetworkEndpointGroup', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkEndpointGroupPscData(proto.Message): + r"""All data that is specifically relevant to only network endpoint + groups of type PRIVATE_SERVICE_CONNECT. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consumer_psc_address (str): + [Output Only] Address allocated from given subnetwork for + PSC. This IP address acts as a VIP for a PSC NEG, allowing + it to act as an endpoint in L7 PSC-XLB. + + This field is a member of `oneof`_ ``_consumer_psc_address``. + psc_connection_id (int): + [Output Only] The PSC connection id of the PSC Network + Endpoint Group Consumer. + + This field is a member of `oneof`_ ``_psc_connection_id``. + psc_connection_status (str): + [Output Only] The connection status of the PSC Forwarding + Rule. Check the PscConnectionStatus enum for the list of + possible values. + + This field is a member of `oneof`_ ``_psc_connection_status``. + """ + class PscConnectionStatus(proto.Enum): + r"""[Output Only] The connection status of the PSC Forwarding Rule. + + Values: + UNDEFINED_PSC_CONNECTION_STATUS (0): + A value indicating that the enum field is not + set. 
+ ACCEPTED (246714279): + The connection has been accepted by the + producer. + CLOSED (380163436): + The connection has been closed by the + producer and will not serve traffic going + forward. + NEEDS_ATTENTION (344491452): + The connection has been accepted by the + producer, but the producer needs to take further + action before the forwarding rule can serve + traffic. + PENDING (35394935): + The connection is pending acceptance by the + producer. + REJECTED (174130302): + The connection has been rejected by the + producer. + STATUS_UNSPECIFIED (42133066): + No description available. + """ + UNDEFINED_PSC_CONNECTION_STATUS = 0 + ACCEPTED = 246714279 + CLOSED = 380163436 + NEEDS_ATTENTION = 344491452 + PENDING = 35394935 + REJECTED = 174130302 + STATUS_UNSPECIFIED = 42133066 + + consumer_psc_address: str = proto.Field( + proto.STRING, + number=452646572, + optional=True, + ) + psc_connection_id: int = proto.Field( + proto.UINT64, + number=292082397, + optional=True, + ) + psc_connection_status: str = proto.Field( + proto.STRING, + number=184149172, + optional=True, + ) + + +class NetworkEndpointGroupsAttachEndpointsRequest(proto.Message): + r""" + + Attributes: + network_endpoints (MutableSequence[google.cloud.compute_v1.types.NetworkEndpoint]): + The list of network endpoints to be attached. + """ + + network_endpoints: MutableSequence['NetworkEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=149850285, + message='NetworkEndpoint', + ) + + +class NetworkEndpointGroupsDetachEndpointsRequest(proto.Message): + r""" + + Attributes: + network_endpoints (MutableSequence[google.cloud.compute_v1.types.NetworkEndpoint]): + The list of network endpoints to be detached. + """ + + network_endpoints: MutableSequence['NetworkEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=149850285, + message='NetworkEndpoint', + ) + + +class NetworkEndpointGroupsListEndpointsRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_status (str): + Optional query parameter for showing the + health status of each network endpoint. Valid + options are SKIP or SHOW. If you don't specify + this parameter, the health status of network + endpoints will not be provided. Check the + HealthStatus enum for the list of possible + values. + + This field is a member of `oneof`_ ``_health_status``. + """ + class HealthStatus(proto.Enum): + r"""Optional query parameter for showing the health status of + each network endpoint. Valid options are SKIP or SHOW. If you + don't specify this parameter, the health status of network + endpoints will not be provided. + + Values: + UNDEFINED_HEALTH_STATUS (0): + A value indicating that the enum field is not + set. + SHOW (2544381): + Show the health status for each network + endpoint. Impacts latency of the call. + SKIP (2547071): + Health status for network endpoints will not + be provided. + """ + UNDEFINED_HEALTH_STATUS = 0 + SHOW = 2544381 + SKIP = 2547071 + + health_status: str = proto.Field( + proto.STRING, + number=380545845, + optional=True, + ) + + +class NetworkEndpointGroupsListNetworkEndpoints(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.NetworkEndpointWithHealthStatus]): + A list of NetworkEndpointWithHealthStatus + resources. + kind (str): + [Output Only] The resource type, which is always + compute#networkEndpointGroupsListNetworkEndpoints for the + list of network endpoints in the specified network endpoint + group. + + This field is a member of `oneof`_ ``_kind``. 
+ next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['NetworkEndpointWithHealthStatus'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='NetworkEndpointWithHealthStatus', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkEndpointGroupsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_endpoint_groups (MutableSequence[google.cloud.compute_v1.types.NetworkEndpointGroup]): + [Output Only] The list of network endpoint groups that are + contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] An informational warning that replaces the + list of network endpoint groups when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + network_endpoint_groups: MutableSequence['NetworkEndpointGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=29346733, + message='NetworkEndpointGroup', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkEndpointWithHealthStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + healths (MutableSequence[google.cloud.compute_v1.types.HealthStatusForNetworkEndpoint]): + [Output only] The health status of network endpoint; + network_endpoint (google.cloud.compute_v1.types.NetworkEndpoint): + [Output only] The network endpoint; + + This field is a member of `oneof`_ ``_network_endpoint``. + """ + + healths: MutableSequence['HealthStatusForNetworkEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=258689431, + message='HealthStatusForNetworkEndpoint', + ) + network_endpoint: 'NetworkEndpoint' = proto.Field( + proto.MESSAGE, + number=56789126, + optional=True, + message='NetworkEndpoint', + ) + + +class NetworkInterface(proto.Message): + r"""A network interface resource attached to an instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + access_configs (MutableSequence[google.cloud.compute_v1.types.AccessConfig]): + An array of configurations for this interface. Currently, + only one access config, ONE_TO_ONE_NAT, is supported. If + there are no accessConfigs specified, then this instance + will have no external internet access. + alias_ip_ranges (MutableSequence[google.cloud.compute_v1.types.AliasIpRange]): + An array of alias IP ranges for this network + interface. You can only specify this field for + network interfaces in VPC networks. + fingerprint (str): + Fingerprint hash of contents stored in this + network interface. 
This field will be ignored + when inserting an Instance or adding a + NetworkInterface. An up-to-date fingerprint must + be provided in order to update the + NetworkInterface. The request will fail with + error 400 Bad Request if the fingerprint is not + provided, or 412 Precondition Failed if the + fingerprint is out of date. + + This field is a member of `oneof`_ ``_fingerprint``. + internal_ipv6_prefix_length (int): + The prefix length of the primary internal + IPv6 range. + + This field is a member of `oneof`_ ``_internal_ipv6_prefix_length``. + ipv6_access_configs (MutableSequence[google.cloud.compute_v1.types.AccessConfig]): + An array of IPv6 access configurations for this interface. + Currently, only one IPv6 access config, DIRECT_IPV6, is + supported. If there is no ipv6AccessConfig specified, then + this instance will have no external IPv6 Internet access. + ipv6_access_type (str): + [Output Only] One of EXTERNAL, INTERNAL to indicate whether + the IP can be accessed from the Internet. This field is + always inherited from its subnetwork. Valid only if + stackType is IPV4_IPV6. Check the Ipv6AccessType enum for + the list of possible values. + + This field is a member of `oneof`_ ``_ipv6_access_type``. + ipv6_address (str): + An IPv6 internal network address for this + network interface. To use a static internal IP + address, it must be unused and in the same + region as the instance's zone. If not specified, + Google Cloud will automatically assign an + internal IPv6 address from the instance's + subnetwork. + + This field is a member of `oneof`_ ``_ipv6_address``. + kind (str): + [Output Only] Type of the resource. Always + compute#networkInterface for network interfaces. + + This field is a member of `oneof`_ ``_kind``. + name (str): + [Output Only] The name of the network interface, which is + generated by the server. For a VM, the network interface + uses the nicN naming format. Where N is a value between 0 + and 7. 
The default interface value is nic0. + + This field is a member of `oneof`_ ``_name``. + network (str): + URL of the VPC network resource for this + instance. When creating an instance, if neither + the network nor the subnetwork is specified, the + default network global/networks/default is used. + If the selected project doesn't have the default + network, you must specify a network or subnet. + If the network is not specified but the + subnetwork is specified, the network is + inferred. If you specify this property, you can + specify the network as a full or partial URL. + For example, the following are all valid URLs: - + https://www.googleapis.com/compute/v1/projects/project/global/networks/ + network - + projects/project/global/networks/network - + global/networks/default + + This field is a member of `oneof`_ ``_network``. + network_attachment (str): + The URL of the network attachment that this interface should + connect to in the following format: + projects/{project_number}/regions/{region_name}/networkAttachments/{network_attachment_name}. + + This field is a member of `oneof`_ ``_network_attachment``. + network_i_p (str): + An IPv4 internal IP address to assign to the + instance for this network interface. If not + specified by the user, an unused internal IP is + assigned by the system. + + This field is a member of `oneof`_ ``_network_i_p``. + nic_type (str): + The type of vNIC to be used on this + interface. This may be gVNIC or VirtioNet. Check + the NicType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_nic_type``. + queue_count (int): + The networking queue count that's specified + by users for the network interface. Both Rx and + Tx queues will be set to this number. It'll be + empty if not specified by the users. + + This field is a member of `oneof`_ ``_queue_count``. + stack_type (str): + The stack type for this network interface. To assign only + IPv4 addresses, use IPV4_ONLY. 
To assign both IPv4 and IPv6 + addresses, use IPV4_IPV6. If not specified, IPV4_ONLY is + used. This field can be both set at instance creation and + update network interface operations. Check the StackType + enum for the list of possible values. + + This field is a member of `oneof`_ ``_stack_type``. + subnetwork (str): + The URL of the Subnetwork resource for this + instance. If the network resource is in legacy + mode, do not specify this field. If the network + is in auto subnet mode, specifying the + subnetwork is optional. If the network is in + custom subnet mode, specifying the subnetwork is + required. If you specify this field, you can + specify the subnetwork as a full or partial URL. + For example, the following are all valid URLs: - + https://www.googleapis.com/compute/v1/projects/project/regions/region + /subnetworks/subnetwork - + regions/region/subnetworks/subnetwork + + This field is a member of `oneof`_ ``_subnetwork``. + """ + class Ipv6AccessType(proto.Enum): + r"""[Output Only] One of EXTERNAL, INTERNAL to indicate whether the IP + can be accessed from the Internet. This field is always inherited + from its subnetwork. Valid only if stackType is IPV4_IPV6. + + Values: + UNDEFINED_IPV6_ACCESS_TYPE (0): + A value indicating that the enum field is not + set. + EXTERNAL (35607499): + This network interface can have external + IPv6. + INTERNAL (279295677): + This network interface can have internal + IPv6. + UNSPECIFIED_IPV6_ACCESS_TYPE (313080613): + No description available. + """ + UNDEFINED_IPV6_ACCESS_TYPE = 0 + EXTERNAL = 35607499 + INTERNAL = 279295677 + UNSPECIFIED_IPV6_ACCESS_TYPE = 313080613 + + class NicType(proto.Enum): + r"""The type of vNIC to be used on this interface. This may be + gVNIC or VirtioNet. + + Values: + UNDEFINED_NIC_TYPE (0): + A value indicating that the enum field is not + set. + GVNIC (68209305): + GVNIC + UNSPECIFIED_NIC_TYPE (67411801): + No type specified. 
+ VIRTIO_NET (452123481): + VIRTIO + """ + UNDEFINED_NIC_TYPE = 0 + GVNIC = 68209305 + UNSPECIFIED_NIC_TYPE = 67411801 + VIRTIO_NET = 452123481 + + class StackType(proto.Enum): + r"""The stack type for this network interface. To assign only IPv4 + addresses, use IPV4_ONLY. To assign both IPv4 and IPv6 addresses, + use IPV4_IPV6. If not specified, IPV4_ONLY is used. This field can + be both set at instance creation and update network interface + operations. + + Values: + UNDEFINED_STACK_TYPE (0): + A value indicating that the enum field is not + set. + IPV4_IPV6 (22197249): + The network interface can have both IPv4 and + IPv6 addresses. + IPV4_ONLY (22373798): + The network interface will be assigned IPv4 + address. + UNSPECIFIED_STACK_TYPE (298084569): + No description available. + """ + UNDEFINED_STACK_TYPE = 0 + IPV4_IPV6 = 22197249 + IPV4_ONLY = 22373798 + UNSPECIFIED_STACK_TYPE = 298084569 + + access_configs: MutableSequence['AccessConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=111058326, + message='AccessConfig', + ) + alias_ip_ranges: MutableSequence['AliasIpRange'] = proto.RepeatedField( + proto.MESSAGE, + number=165085631, + message='AliasIpRange', + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + internal_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=203833757, + optional=True, + ) + ipv6_access_configs: MutableSequence['AccessConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=483472110, + message='AccessConfig', + ) + ipv6_access_type: str = proto.Field( + proto.STRING, + number=504658653, + optional=True, + ) + ipv6_address: str = proto.Field( + proto.STRING, + number=341563804, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + network_attachment: str = 
proto.Field( + proto.STRING, + number=224644052, + optional=True, + ) + network_i_p: str = proto.Field( + proto.STRING, + number=207181961, + optional=True, + ) + nic_type: str = proto.Field( + proto.STRING, + number=59810577, + optional=True, + ) + queue_count: int = proto.Field( + proto.INT32, + number=503708769, + optional=True, + ) + stack_type: str = proto.Field( + proto.STRING, + number=425908881, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + optional=True, + ) + + +class NetworkList(proto.Message): + r"""Contains a list of networks. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Network]): + A list of Network resources. + kind (str): + [Output Only] Type of resource. Always compute#networkList + for lists of networks. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Network'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Network', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NetworkPeering(proto.Message): + r"""A network peering attached to a network resource. The message + includes the peering name, peer network, peering state, and a + flag indicating whether Google Compute Engine should + automatically create routes for the peering. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_create_routes (bool): + This field will be deprecated soon. Use the + exchange_subnet_routes field instead. Indicates whether full + mesh connectivity is created and managed automatically + between peered networks. Currently this field should always + be true since Google Compute Engine will automatically + create and manage subnetwork routes between two networks + when peering state is ACTIVE. + + This field is a member of `oneof`_ ``_auto_create_routes``. + exchange_subnet_routes (bool): + Indicates whether full mesh connectivity is + created and managed automatically between peered + networks. Currently this field should always be + true since Google Compute Engine will + automatically create and manage subnetwork + routes between two networks when peering state + is ACTIVE. + + This field is a member of `oneof`_ ``_exchange_subnet_routes``. + export_custom_routes (bool): + Whether to export the custom routes to peer + network. 
The default value is false. + + This field is a member of `oneof`_ ``_export_custom_routes``. + export_subnet_routes_with_public_ip (bool): + Whether subnet routes with public IP range + are exported. The default value is true, all + subnet routes are exported. IPv4 special-use + ranges are always exported to peers and are not + controlled by this field. + + This field is a member of `oneof`_ ``_export_subnet_routes_with_public_ip``. + import_custom_routes (bool): + Whether to import the custom routes from peer + network. The default value is false. + + This field is a member of `oneof`_ ``_import_custom_routes``. + import_subnet_routes_with_public_ip (bool): + Whether subnet routes with public IP range + are imported. The default value is false. IPv4 + special-use ranges are always imported from + peers and are not controlled by this field. + + This field is a member of `oneof`_ ``_import_subnet_routes_with_public_ip``. + name (str): + Name of this peering. Provided by the client when the + peering is created. The name must comply with RFC1035. + Specifically, the name must be 1-63 characters long and + match regular expression ``[a-z]([-a-z0-9]*[a-z0-9])?``. The + first character must be a lowercase letter, and all the + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network (str): + The URL of the peer network. It can be either + full URL or partial URL. The peer network may + belong to a different project. If the partial + URL does not contain project, it is assumed that + the peer network is in the same project as the + current network. + + This field is a member of `oneof`_ ``_network``. + peer_mtu (int): + Maximum Transmission Unit in bytes. + + This field is a member of `oneof`_ ``_peer_mtu``. + stack_type (str): + Which IP version(s) of traffic and routes are allowed to be + imported or exported between peer networks. 
The default + value is IPV4_ONLY. Check the StackType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_stack_type``. + state (str): + [Output Only] State for the peering, either ``ACTIVE`` or + ``INACTIVE``. The peering is ``ACTIVE`` when there's a + matching configuration in the peer network. Check the State + enum for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + state_details (str): + [Output Only] Details about the current state of the + peering. + + This field is a member of `oneof`_ ``_state_details``. + """ + class StackType(proto.Enum): + r"""Which IP version(s) of traffic and routes are allowed to be imported + or exported between peer networks. The default value is IPV4_ONLY. + + Values: + UNDEFINED_STACK_TYPE (0): + A value indicating that the enum field is not + set. + IPV4_IPV6 (22197249): + This Peering will allow IPv4 traffic and routes to be + exchanged. Additionally if the matching peering is + IPV4_IPV6, IPv6 traffic and routes will be exchanged as + well. + IPV4_ONLY (22373798): + This Peering will only allow IPv4 traffic and routes to be + exchanged, even if the matching peering is IPV4_IPV6. + """ + UNDEFINED_STACK_TYPE = 0 + IPV4_IPV6 = 22197249 + IPV4_ONLY = 22373798 + + class State(proto.Enum): + r"""[Output Only] State for the peering, either ``ACTIVE`` or + ``INACTIVE``. The peering is ``ACTIVE`` when there's a matching + configuration in the peer network. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + Matching configuration exists on the peer. + INACTIVE (270421099): + There is no matching configuration on the + peer, including the case when peer does not + exist. 
+ """ + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + INACTIVE = 270421099 + + auto_create_routes: bool = proto.Field( + proto.BOOL, + number=57454941, + optional=True, + ) + exchange_subnet_routes: bool = proto.Field( + proto.BOOL, + number=26322256, + optional=True, + ) + export_custom_routes: bool = proto.Field( + proto.BOOL, + number=60281485, + optional=True, + ) + export_subnet_routes_with_public_ip: bool = proto.Field( + proto.BOOL, + number=97940834, + optional=True, + ) + import_custom_routes: bool = proto.Field( + proto.BOOL, + number=197982398, + optional=True, + ) + import_subnet_routes_with_public_ip: bool = proto.Field( + proto.BOOL, + number=14419729, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + peer_mtu: int = proto.Field( + proto.INT32, + number=69584721, + optional=True, + ) + stack_type: str = proto.Field( + proto.STRING, + number=425908881, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + state_details: str = proto.Field( + proto.STRING, + number=95566996, + optional=True, + ) + + +class NetworkPerformanceConfig(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_egress_bandwidth_tier (str): + Check the TotalEgressBandwidthTier enum for + the list of possible values. + + This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``. + """ + class TotalEgressBandwidthTier(proto.Enum): + r""" + + Values: + UNDEFINED_TOTAL_EGRESS_BANDWIDTH_TIER (0): + A value indicating that the enum field is not + set. + DEFAULT (115302945): + No description available. + TIER_1 (326919444): + No description available. 
+ """ + UNDEFINED_TOTAL_EGRESS_BANDWIDTH_TIER = 0 + DEFAULT = 115302945 + TIER_1 = 326919444 + + total_egress_bandwidth_tier: str = proto.Field( + proto.STRING, + number=130109439, + optional=True, + ) + + +class NetworkRoutingConfig(proto.Message): + r"""A routing configuration attached to a network resource. The + message includes the list of routers associated with the + network, and a flag indicating the type of routing behavior to + enforce network-wide. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + routing_mode (str): + The network-wide routing mode to use. If set + to REGIONAL, this network's Cloud Routers will + only advertise routes with subnets of this + network in the same region as the router. If set + to GLOBAL, this network's Cloud Routers will + advertise routes with all subnets of this + network, across regions. Check the RoutingMode + enum for the list of possible values. + + This field is a member of `oneof`_ ``_routing_mode``. + """ + class RoutingMode(proto.Enum): + r"""The network-wide routing mode to use. If set to REGIONAL, + this network's Cloud Routers will only advertise routes with + subnets of this network in the same region as the router. If set + to GLOBAL, this network's Cloud Routers will advertise routes + with all subnets of this network, across regions. + + Values: + UNDEFINED_ROUTING_MODE (0): + A value indicating that the enum field is not + set. + GLOBAL (494663587): + No description available. + REGIONAL (92288543): + No description available. + """ + UNDEFINED_ROUTING_MODE = 0 + GLOBAL = 494663587 + REGIONAL = 92288543 + + routing_mode: str = proto.Field( + proto.STRING, + number=475143548, + optional=True, + ) + + +class NetworksAddPeeringRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_create_routes (bool): + This field will be deprecated soon. Use + exchange_subnet_routes in network_peering instead. Indicates + whether full mesh connectivity is created and managed + automatically between peered networks. Currently this field + should always be true since Google Compute Engine will + automatically create and manage subnetwork routes between + two networks when peering state is ACTIVE. + + This field is a member of `oneof`_ ``_auto_create_routes``. + name (str): + Name of the peering, which should conform to + RFC1035. + + This field is a member of `oneof`_ ``_name``. + network_peering (google.cloud.compute_v1.types.NetworkPeering): + Network peering parameters. In order to specify route + policies for peering using import and export custom routes, + you must specify all peering related parameters (name, peer + network, exchange_subnet_routes) in the network_peering + field. The corresponding fields in NetworksAddPeeringRequest + will be deprecated soon. + + This field is a member of `oneof`_ ``_network_peering``. + peer_network (str): + URL of the peer network. It can be either + full URL or partial URL. The peer network may + belong to a different project. If the partial + URL does not contain project, it is assumed that + the peer network is in the same project as the + current network. + + This field is a member of `oneof`_ ``_peer_network``. 
+ """ + + auto_create_routes: bool = proto.Field( + proto.BOOL, + number=57454941, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network_peering: 'NetworkPeering' = proto.Field( + proto.MESSAGE, + number=328926767, + optional=True, + message='NetworkPeering', + ) + peer_network: str = proto.Field( + proto.STRING, + number=500625489, + optional=True, + ) + + +class NetworksGetEffectiveFirewallsResponse(proto.Message): + r""" + + Attributes: + firewall_policys (MutableSequence[google.cloud.compute_v1.types.NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy]): + Effective firewalls from firewall policy. + firewalls (MutableSequence[google.cloud.compute_v1.types.Firewall]): + Effective firewalls on the network. + """ + + firewall_policys: MutableSequence['NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=410985794, + message='NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + ) + firewalls: MutableSequence['Firewall'] = proto.RepeatedField( + proto.MESSAGE, + number=272245619, + message='Firewall', + ) + + +class NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + [Output Only] Deprecated, please use short name instead. The + display name of the firewall policy. + + This field is a member of `oneof`_ ``_display_name``. + name (str): + [Output Only] The name of the firewall policy. + + This field is a member of `oneof`_ ``_name``. + rules (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRule]): + The rules that apply to the network. + short_name (str): + [Output Only] The short name of the firewall policy. + + This field is a member of `oneof`_ ``_short_name``. + type_ (str): + [Output Only] The type of the firewall policy. 
Check the + Type enum for the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""[Output Only] The type of the firewall policy. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + HIERARCHY (69902869): + No description available. + NETWORK (413984270): + No description available. + UNSPECIFIED (526786327): + No description available. + """ + UNDEFINED_TYPE = 0 + HIERARCHY = 69902869 + NETWORK = 413984270 + UNSPECIFIED = 526786327 + + display_name: str = proto.Field( + proto.STRING, + number=4473832, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + rules: MutableSequence['FirewallPolicyRule'] = proto.RepeatedField( + proto.MESSAGE, + number=108873975, + message='FirewallPolicyRule', + ) + short_name: str = proto.Field( + proto.STRING, + number=492051566, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class NetworksRemovePeeringRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Name of the peering, which should conform to + RFC1035. + + This field is a member of `oneof`_ ``_name``. + """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + +class NetworksUpdatePeeringRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_peering (google.cloud.compute_v1.types.NetworkPeering): + + This field is a member of `oneof`_ ``_network_peering``. + """ + + network_peering: 'NetworkPeering' = proto.Field( + proto.MESSAGE, + number=328926767, + optional=True, + message='NetworkPeering', + ) + + +class NodeGroup(proto.Message): + r"""Represents a sole-tenant Node Group resource. 
A sole-tenant + node is a physical server that is dedicated to hosting VM + instances only for your specific project. Use sole-tenant nodes + to keep your instances physically separated from instances in + other projects, or to group your instances together on the same + host hardware. For more information, read Sole-tenant nodes. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaling_policy (google.cloud.compute_v1.types.NodeGroupAutoscalingPolicy): + Specifies how autoscaling should behave. + + This field is a member of `oneof`_ ``_autoscaling_policy``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] The type of the resource. Always + compute#nodeGroup for node group. + + This field is a member of `oneof`_ ``_kind``. + location_hint (str): + An opaque location hint used to place the Node close to + other resources. This field is for use by internal tools + that use the public API. The location hint here on the + NodeGroup overrides any location_hint present in the + NodeTemplate. + + This field is a member of `oneof`_ ``_location_hint``. + maintenance_policy (str): + Specifies how to handle instances when a node in the group + undergoes maintenance. Set to one of: DEFAULT, + RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default + value is DEFAULT. For more information, see Maintenance + policies. 
Check the MaintenancePolicy enum for the list of + possible values. + + This field is a member of `oneof`_ ``_maintenance_policy``. + maintenance_window (google.cloud.compute_v1.types.NodeGroupMaintenanceWindow): + + This field is a member of `oneof`_ ``_maintenance_window``. + name (str): + The name of the resource, provided by the client when + initially creating the resource. The resource name must be + 1-63 characters long, and comply with RFC1035. Specifically, + the name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + node_template (str): + URL of the node template to create the node + group from. + + This field is a member of `oneof`_ ``_node_template``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + share_settings (google.cloud.compute_v1.types.ShareSettings): + Share-settings for the node group + + This field is a member of `oneof`_ ``_share_settings``. + size (int): + [Output Only] The total number of nodes in the node group. + + This field is a member of `oneof`_ ``_size``. + status (str): + Check the Status enum for the list of + possible values. + + This field is a member of `oneof`_ ``_status``. + zone (str): + [Output Only] The name of the zone where the node group + resides, such as us-central1-a. + + This field is a member of `oneof`_ ``_zone``. + """ + class MaintenancePolicy(proto.Enum): + r"""Specifies how to handle instances when a node in the group undergoes + maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or + MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT. For more + information, see Maintenance policies. 
+ + Values: + UNDEFINED_MAINTENANCE_POLICY (0): + A value indicating that the enum field is not + set. + DEFAULT (115302945): + Allow the node and corresponding instances to + retain default maintenance behavior. + MAINTENANCE_POLICY_UNSPECIFIED (72964182): + No description available. + MIGRATE_WITHIN_NODE_GROUP (153483394): + When maintenance must be done on a node, the + instances on that node will be moved to other + nodes in the group. Instances with + onHostMaintenance = MIGRATE will live migrate to + their destinations while instances with + onHostMaintenance = TERMINATE will terminate and + then restart on their destination nodes if + automaticRestart = true. + RESTART_IN_PLACE (228647325): + Instances in this group will restart on the + same node when maintenance has completed. + Instances must have onHostMaintenance = + TERMINATE, and they will only restart if + automaticRestart = true. + """ + UNDEFINED_MAINTENANCE_POLICY = 0 + DEFAULT = 115302945 + MAINTENANCE_POLICY_UNSPECIFIED = 72964182 + MIGRATE_WITHIN_NODE_GROUP = 153483394 + RESTART_IN_PLACE = 228647325 + + class Status(proto.Enum): + r""" + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + No description available. + DELETING (528602024): + No description available. + INVALID (530283991): + No description available. + READY (77848963): + No description available. 
+ """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + INVALID = 530283991 + READY = 77848963 + + autoscaling_policy: 'NodeGroupAutoscalingPolicy' = proto.Field( + proto.MESSAGE, + number=221950041, + optional=True, + message='NodeGroupAutoscalingPolicy', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + location_hint: str = proto.Field( + proto.STRING, + number=350519505, + optional=True, + ) + maintenance_policy: str = proto.Field( + proto.STRING, + number=528327646, + optional=True, + ) + maintenance_window: 'NodeGroupMaintenanceWindow' = proto.Field( + proto.MESSAGE, + number=186374812, + optional=True, + message='NodeGroupMaintenanceWindow', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + node_template: str = proto.Field( + proto.STRING, + number=323154455, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + share_settings: 'ShareSettings' = proto.Field( + proto.MESSAGE, + number=266668163, + optional=True, + message='ShareSettings', + ) + size: int = proto.Field( + proto.INT32, + number=3530753, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class NodeGroupAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. 
+ + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.NodeGroupsScopedList]): + A list of NodeGroupsScopedList resources. + kind (str): + [Output Only] Type of resource.Always + compute#nodeGroupAggregatedList for aggregated lists of node + groups. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'NodeGroupsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='NodeGroupsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeGroupAutoscalingPolicy(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_nodes (int): + The maximum number of nodes that the group + should have. Must be set if autoscaling is + enabled. Maximum value allowed is 100. + + This field is a member of `oneof`_ ``_max_nodes``. + min_nodes (int): + The minimum number of nodes that the group + should have. + + This field is a member of `oneof`_ ``_min_nodes``. + mode (str): + The autoscaling mode. Set to one of: ON, OFF, or + ONLY_SCALE_OUT. For more information, see Autoscaler modes. + Check the Mode enum for the list of possible values. + + This field is a member of `oneof`_ ``_mode``. + """ + class Mode(proto.Enum): + r"""The autoscaling mode. Set to one of: ON, OFF, or ONLY_SCALE_OUT. For + more information, see Autoscaler modes. + + Values: + UNDEFINED_MODE (0): + A value indicating that the enum field is not + set. + MODE_UNSPECIFIED (371348091): + No description available. + OFF (78159): + Autoscaling is disabled. + ON (2527): + Autocaling is fully enabled. + ONLY_SCALE_OUT (152713670): + Autoscaling will only scale out and will not + remove nodes. + """ + UNDEFINED_MODE = 0 + MODE_UNSPECIFIED = 371348091 + OFF = 78159 + ON = 2527 + ONLY_SCALE_OUT = 152713670 + + max_nodes: int = proto.Field( + proto.INT32, + number=297762838, + optional=True, + ) + min_nodes: int = proto.Field( + proto.INT32, + number=533370500, + optional=True, + ) + mode: str = proto.Field( + proto.STRING, + number=3357091, + optional=True, + ) + + +class NodeGroupList(proto.Message): + r"""Contains a list of nodeGroups. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.NodeGroup]): + A list of NodeGroup resources. 
+ kind (str): + [Output Only] Type of resource.Always compute#nodeGroupList + for lists of node groups. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['NodeGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='NodeGroup', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeGroupMaintenanceWindow(proto.Message): + r"""Time window specified for daily maintenance operations. GCE's + internal maintenance will be performed within this window. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + maintenance_duration (google.cloud.compute_v1.types.Duration): + [Output only] A predetermined duration for the window, + automatically chosen to be the smallest possible in the + given scenario. + + This field is a member of `oneof`_ ``_maintenance_duration``. + start_time (str): + Start time of the window. This must be in UTC + format that resolves to one of 00:00, 04:00, + 08:00, 12:00, 16:00, or 20:00. For example, both + 13:00-5 and 08:00 are valid. + + This field is a member of `oneof`_ ``_start_time``. + """ + + maintenance_duration: 'Duration' = proto.Field( + proto.MESSAGE, + number=525291840, + optional=True, + message='Duration', + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + + +class NodeGroupNode(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerators (MutableSequence[google.cloud.compute_v1.types.AcceleratorConfig]): + Accelerators for this node. + consumed_resources (google.cloud.compute_v1.types.InstanceConsumptionInfo): + Node resources that are reserved by all + instances. + + This field is a member of `oneof`_ ``_consumed_resources``. + cpu_overcommit_type (str): + CPU overcommit. + Check the CpuOvercommitType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_cpu_overcommit_type``. + disks (MutableSequence[google.cloud.compute_v1.types.LocalDisk]): + Local disk configurations. + instance_consumption_data (MutableSequence[google.cloud.compute_v1.types.InstanceConsumptionData]): + Instance data that shows consumed resources + on the node. + instances (MutableSequence[str]): + Instances scheduled on this node. + name (str): + The name of the node. + + This field is a member of `oneof`_ ``_name``. + node_type (str): + The type of this node. 
+ + This field is a member of `oneof`_ ``_node_type``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + server_binding (google.cloud.compute_v1.types.ServerBinding): + Binding properties for the physical server. + + This field is a member of `oneof`_ ``_server_binding``. + server_id (str): + Server ID associated with this node. + + This field is a member of `oneof`_ ``_server_id``. + status (str): + Check the Status enum for the list of + possible values. + + This field is a member of `oneof`_ ``_status``. + total_resources (google.cloud.compute_v1.types.InstanceConsumptionInfo): + Total amount of available resources on the + node. + + This field is a member of `oneof`_ ``_total_resources``. + """ + class CpuOvercommitType(proto.Enum): + r"""CPU overcommit. + + Values: + UNDEFINED_CPU_OVERCOMMIT_TYPE (0): + A value indicating that the enum field is not + set. + CPU_OVERCOMMIT_TYPE_UNSPECIFIED (520665615): + No description available. + ENABLED (182130465): + No description available. + NONE (2402104): + No description available. + """ + UNDEFINED_CPU_OVERCOMMIT_TYPE = 0 + CPU_OVERCOMMIT_TYPE_UNSPECIFIED = 520665615 + ENABLED = 182130465 + NONE = 2402104 + + class Status(proto.Enum): + r""" + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + No description available. + DELETING (528602024): + No description available. + INVALID (530283991): + No description available. + READY (77848963): + No description available. + REPAIRING (413483285): + No description available. 
+ """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + INVALID = 530283991 + READY = 77848963 + REPAIRING = 413483285 + + accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=269577064, + message='AcceleratorConfig', + ) + consumed_resources: 'InstanceConsumptionInfo' = proto.Field( + proto.MESSAGE, + number=334527118, + optional=True, + message='InstanceConsumptionInfo', + ) + cpu_overcommit_type: str = proto.Field( + proto.STRING, + number=247727959, + optional=True, + ) + disks: MutableSequence['LocalDisk'] = proto.RepeatedField( + proto.MESSAGE, + number=95594102, + message='LocalDisk', + ) + instance_consumption_data: MutableSequence['InstanceConsumptionData'] = proto.RepeatedField( + proto.MESSAGE, + number=84715576, + message='InstanceConsumptionData', + ) + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + node_type: str = proto.Field( + proto.STRING, + number=465832791, + optional=True, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + ) + server_binding: 'ServerBinding' = proto.Field( + proto.MESSAGE, + number=208179593, + optional=True, + message='ServerBinding', + ) + server_id: str = proto.Field( + proto.STRING, + number=339433367, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + total_resources: 'InstanceConsumptionInfo' = proto.Field( + proto.MESSAGE, + number=97406698, + optional=True, + message='InstanceConsumptionInfo', + ) + + +class NodeGroupsAddNodesRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + additional_node_count (int): + Count of additional nodes to be added to the + node group. 
+ + This field is a member of `oneof`_ ``_additional_node_count``. + """ + + additional_node_count: int = proto.Field( + proto.INT32, + number=134997930, + optional=True, + ) + + +class NodeGroupsDeleteNodesRequest(proto.Message): + r""" + + Attributes: + nodes (MutableSequence[str]): + Names of the nodes to delete. + """ + + nodes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=104993457, + ) + + +class NodeGroupsListNodes(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.NodeGroupNode]): + A list of Node resources. + kind (str): + [Output Only] The resource type, which is always + compute.nodeGroupsListNodes for the list of nodes in the + specified node group. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['NodeGroupNode'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='NodeGroupNode', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeGroupsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_groups (MutableSequence[google.cloud.compute_v1.types.NodeGroup]): + [Output Only] A list of node groups contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] An informational warning that appears when the + nodeGroup list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + node_groups: MutableSequence['NodeGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=73188017, + message='NodeGroup', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeGroupsSetNodeTemplateRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_template (str): + Full or partial URL of the node template + resource to be updated for this node group. + + This field is a member of `oneof`_ ``_node_template``. 
+ """ + + node_template: str = proto.Field( + proto.STRING, + number=323154455, + optional=True, + ) + + +class NodeGroupsSimulateMaintenanceEventRequest(proto.Message): + r""" + + Attributes: + nodes (MutableSequence[str]): + Names of the nodes to go under maintenance + simulation. + """ + + nodes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=104993457, + ) + + +class NodeTemplate(proto.Message): + r"""Represent a sole-tenant Node Template resource. You can use a + template to define properties for nodes in a node group. For + more information, read Creating node groups and instances. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerators (MutableSequence[google.cloud.compute_v1.types.AcceleratorConfig]): + + cpu_overcommit_type (str): + CPU overcommit. + Check the CpuOvercommitType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_cpu_overcommit_type``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + disks (MutableSequence[google.cloud.compute_v1.types.LocalDisk]): + + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] The type of the resource. Always + compute#nodeTemplate for node templates. + + This field is a member of `oneof`_ ``_kind``. + name (str): + The name of the resource, provided by the client when + initially creating the resource. The resource name must be + 1-63 characters long, and comply with RFC1035. 
Specifically, + the name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + node_affinity_labels (MutableMapping[str, str]): + Labels to use for node affinity, which will + be used in instance scheduling. + node_type (str): + The node type to use for nodes group that are + created from this template. + + This field is a member of `oneof`_ ``_node_type``. + node_type_flexibility (google.cloud.compute_v1.types.NodeTemplateNodeTypeFlexibility): + Do not use. Instead, use the node_type property. + + This field is a member of `oneof`_ ``_node_type_flexibility``. + region (str): + [Output Only] The name of the region where the node template + resides, such as us-central1. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + server_binding (google.cloud.compute_v1.types.ServerBinding): + Sets the binding properties for the physical server. Valid + values include: - *[Default]* RESTART_NODE_ON_ANY_SERVER: + Restarts VMs on any available physical server - + RESTART_NODE_ON_MINIMAL_SERVER: Restarts VMs on the same + physical server whenever possible See Sole-tenant node + options for more information. + + This field is a member of `oneof`_ ``_server_binding``. + status (str): + [Output Only] The status of the node template. One of the + following values: CREATING, READY, and DELETING. Check the + Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + status_message (str): + [Output Only] An optional, human-readable explanation of the + status. + + This field is a member of `oneof`_ ``_status_message``. 
+ """ + class CpuOvercommitType(proto.Enum): + r"""CPU overcommit. + + Values: + UNDEFINED_CPU_OVERCOMMIT_TYPE (0): + A value indicating that the enum field is not + set. + CPU_OVERCOMMIT_TYPE_UNSPECIFIED (520665615): + No description available. + ENABLED (182130465): + No description available. + NONE (2402104): + No description available. + """ + UNDEFINED_CPU_OVERCOMMIT_TYPE = 0 + CPU_OVERCOMMIT_TYPE_UNSPECIFIED = 520665615 + ENABLED = 182130465 + NONE = 2402104 + + class Status(proto.Enum): + r"""[Output Only] The status of the node template. One of the following + values: CREATING, READY, and DELETING. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + Resources are being allocated. + DELETING (528602024): + The node template is currently being deleted. + INVALID (530283991): + Invalid status. + READY (77848963): + The node template is ready. + """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + INVALID = 530283991 + READY = 77848963 + + accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=269577064, + message='AcceleratorConfig', + ) + cpu_overcommit_type: str = proto.Field( + proto.STRING, + number=247727959, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disks: MutableSequence['LocalDisk'] = proto.RepeatedField( + proto.MESSAGE, + number=95594102, + message='LocalDisk', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + node_affinity_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=339007161, + ) + node_type: str = proto.Field( 
+ proto.STRING, + number=465832791, + optional=True, + ) + node_type_flexibility: 'NodeTemplateNodeTypeFlexibility' = proto.Field( + proto.MESSAGE, + number=315257905, + optional=True, + message='NodeTemplateNodeTypeFlexibility', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + server_binding: 'ServerBinding' = proto.Field( + proto.MESSAGE, + number=208179593, + optional=True, + message='ServerBinding', + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + status_message: str = proto.Field( + proto.STRING, + number=297428154, + optional=True, + ) + + +class NodeTemplateAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.NodeTemplatesScopedList]): + A list of NodeTemplatesScopedList resources. + kind (str): + [Output Only] Type of resource.Always + compute#nodeTemplateAggregatedList for aggregated lists of + node templates. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'NodeTemplatesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='NodeTemplatesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeTemplateList(proto.Message): + r"""Contains a list of node templates. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.NodeTemplate]): + A list of NodeTemplate resources. + kind (str): + [Output Only] Type of resource.Always + compute#nodeTemplateList for lists of node templates. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['NodeTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='NodeTemplate', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeTemplateNodeTypeFlexibility(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cpus (str): + + This field is a member of `oneof`_ ``_cpus``. + local_ssd (str): + + This field is a member of `oneof`_ ``_local_ssd``. + memory (str): + + This field is a member of `oneof`_ ``_memory``. + """ + + cpus: str = proto.Field( + proto.STRING, + number=3060683, + optional=True, + ) + local_ssd: str = proto.Field( + proto.STRING, + number=405741360, + optional=True, + ) + memory: str = proto.Field( + proto.STRING, + number=532856065, + optional=True, + ) + + +class NodeTemplatesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_templates (MutableSequence[google.cloud.compute_v1.types.NodeTemplate]): + [Output Only] A list of node templates contained in this + scope. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] An informational warning that appears when the + node templates list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + node_templates: MutableSequence['NodeTemplate'] = proto.RepeatedField( + proto.MESSAGE, + number=354111804, + message='NodeTemplate', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeType(proto.Message): + r"""Represent a sole-tenant Node Type resource. Each node within + a node group must have a node type. A node type specifies the + total amount of cores and memory for that node. Currently, the + only available node type is n1-node-96-624 node type that has 96 + vCPUs and 624 GB of memory, available in multiple zones. For + more information read Node types. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cpu_platform (str): + [Output Only] The CPU platform used by this node type. + + This field is a member of `oneof`_ ``_cpu_platform``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + deprecated (google.cloud.compute_v1.types.DeprecationStatus): + [Output Only] The deprecation status associated with this + node type. + + This field is a member of `oneof`_ ``_deprecated``. + description (str): + [Output Only] An optional textual description of the + resource. + + This field is a member of `oneof`_ ``_description``. + guest_cpus (int): + [Output Only] The number of virtual CPUs that are available + to the node type. + + This field is a member of `oneof`_ ``_guest_cpus``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] The type of the resource. 
Always + compute#nodeType for node types. + + This field is a member of `oneof`_ ``_kind``. + local_ssd_gb (int): + [Output Only] Local SSD available to the node type, defined + in GB. + + This field is a member of `oneof`_ ``_local_ssd_gb``. + memory_mb (int): + [Output Only] The amount of physical memory available to the + node type, defined in MB. + + This field is a member of `oneof`_ ``_memory_mb``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + zone (str): + [Output Only] The name of the zone where the node type + resides, such as us-central1-a. + + This field is a member of `oneof`_ ``_zone``. + """ + + cpu_platform: str = proto.Field( + proto.STRING, + number=410285354, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + deprecated: 'DeprecationStatus' = proto.Field( + proto.MESSAGE, + number=515138995, + optional=True, + message='DeprecationStatus', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + guest_cpus: int = proto.Field( + proto.INT32, + number=393356754, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + local_ssd_gb: int = proto.Field( + proto.INT32, + number=329237578, + optional=True, + ) + memory_mb: int = proto.Field( + proto.INT32, + number=116001171, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class NodeTypeAggregatedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.NodeTypesScopedList]): + A list of NodeTypesScopedList resources. + kind (str): + [Output Only] Type of resource.Always + compute#nodeTypeAggregatedList for aggregated lists of node + types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'NodeTypesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='NodeTypesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeTypeList(proto.Message): + r"""Contains a list of node types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.NodeType]): + A list of NodeType resources. + kind (str): + [Output Only] Type of resource.Always compute#nodeTypeList + for lists of node types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['NodeType'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='NodeType', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NodeTypesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_types (MutableSequence[google.cloud.compute_v1.types.NodeType]): + [Output Only] A list of node types contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] An informational warning that appears when the + node types list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + node_types: MutableSequence['NodeType'] = proto.RepeatedField( + proto.MESSAGE, + number=482172924, + message='NodeType', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class NotificationEndpoint(proto.Message): + r"""Represents a notification endpoint. A notification endpoint + resource defines an endpoint to receive notifications when there + are status changes detected by the associated health check + service. For more information, see Health checks overview. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + grpc_settings (google.cloud.compute_v1.types.NotificationEndpointGrpcSettings): + Settings of the gRPC notification endpoint + including the endpoint URL and the retry + duration. + + This field is a member of `oneof`_ ``_grpc_settings``. + id (int): + [Output Only] A unique identifier for this resource type. + The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#notificationEndpoint for notification endpoints. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + region (str): + [Output Only] URL of the region where the notification + endpoint resides. This field applies only to the regional + resource. You must specify this field as part of the HTTP + request URL. It is not settable as a field in the request + body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + grpc_settings: 'NotificationEndpointGrpcSettings' = proto.Field( + proto.MESSAGE, + number=456139556, + optional=True, + message='NotificationEndpointGrpcSettings', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + + +class NotificationEndpointGrpcSettings(proto.Message): + r"""Represents a gRPC setting that describes one gRPC + notification endpoint and the retry duration attempting to send + notification to this endpoint. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + authority (str): + Optional. If specified, this field is used to + set the authority header by the sender of + notifications. See + https://tools.ietf.org/html/rfc7540#section-8.1.2.3 + + This field is a member of `oneof`_ ``_authority``. + endpoint (str): + Endpoint to which gRPC notifications are + sent. This must be a valid gRPCLB DNS name. + + This field is a member of `oneof`_ ``_endpoint``. + payload_name (str): + Optional. If specified, this field is used to + populate the "name" field in gRPC requests. + + This field is a member of `oneof`_ ``_payload_name``. + resend_interval (google.cloud.compute_v1.types.Duration): + Optional. This field is used to configure how + often to send a full update of all non-healthy + backends. If unspecified, full updates are not + sent. If specified, must be in the range between + 600 seconds to 3600 seconds. 
Nanos are + disallowed. Can only be set for regional + notification endpoints. + + This field is a member of `oneof`_ ``_resend_interval``. + retry_duration_sec (int): + How much time (in seconds) is spent + attempting notification retries until a + successful response is received. Default is 30s. + Limit is 20m (1200s). Must be a positive number. + + This field is a member of `oneof`_ ``_retry_duration_sec``. + """ + + authority: str = proto.Field( + proto.STRING, + number=401868611, + optional=True, + ) + endpoint: str = proto.Field( + proto.STRING, + number=130489749, + optional=True, + ) + payload_name: str = proto.Field( + proto.STRING, + number=300358300, + optional=True, + ) + resend_interval: 'Duration' = proto.Field( + proto.MESSAGE, + number=478288969, + optional=True, + message='Duration', + ) + retry_duration_sec: int = proto.Field( + proto.UINT32, + number=115681117, + optional=True, + ) + + +class NotificationEndpointList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.NotificationEndpoint]): + A list of NotificationEndpoint resources. + kind (str): + [Output Only] Type of the resource. Always + compute#notificationEndpoint for notification endpoints. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['NotificationEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='NotificationEndpoint', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class Operation(proto.Message): + r"""Represents an Operation resource. Google Compute Engine has three + Operation resources: \* + `Global `__ \* + `Regional `__ \* + `Zonal `__ You can + use an operation resource to manage asynchronous API requests. For + more information, read Handling API responses. Operations can be + global, regional or zonal. - For global operations, use the + ``globalOperations`` resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, use the + ``zonalOperations`` resource. For more information, read Global, + Regional, and Zonal Resources. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + client_operation_id (str): + [Output Only] The value of ``requestId`` if you provided it + in the request. Not present otherwise. + + This field is a member of `oneof`_ ``_client_operation_id``. + creation_timestamp (str): + [Deprecated] This field is deprecated. 
+ + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + [Output Only] A textual description of the operation, which + is set when the operation is created. + + This field is a member of `oneof`_ ``_description``. + end_time (str): + [Output Only] The time that this operation was completed. + This value is in RFC3339 text format. + + This field is a member of `oneof`_ ``_end_time``. + error (google.cloud.compute_v1.types.Error): + [Output Only] If errors are generated during processing of + the operation, this field will be populated. + + This field is a member of `oneof`_ ``_error``. + http_error_message (str): + [Output Only] If the operation fails, this field contains + the HTTP error message that was returned, such as + ``NOT FOUND``. + + This field is a member of `oneof`_ ``_http_error_message``. + http_error_status_code (int): + [Output Only] If the operation fails, this field contains + the HTTP error status code that was returned. For example, a + ``404`` means the resource was not found. + + This field is a member of `oneof`_ ``_http_error_status_code``. + id (int): + [Output Only] The unique identifier for the operation. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + insert_time (str): + [Output Only] The time that this operation was requested. + This value is in RFC3339 text format. + + This field is a member of `oneof`_ ``_insert_time``. + kind (str): + [Output Only] Type of the resource. Always + ``compute#operation`` for Operation resources. + + This field is a member of `oneof`_ ``_kind``. + name (str): + [Output Only] Name of the operation. + + This field is a member of `oneof`_ ``_name``. + operation_group_id (str): + [Output Only] An ID that represents a group of operations, + such as when a group of operations results from a + ``bulkInsert`` API request. + + This field is a member of `oneof`_ ``_operation_group_id``. 
+ operation_type (str): + [Output Only] The type of operation, such as ``insert``, + ``update``, or ``delete``, and so on. + + This field is a member of `oneof`_ ``_operation_type``. + progress (int): + [Output Only] An optional progress indicator that ranges + from 0 to 100. There is no requirement that this be linear + or support any granularity of operations. This should not be + used to guess when the operation will be complete. This + number should monotonically increase as the operation + progresses. + + This field is a member of `oneof`_ ``_progress``. + region (str): + [Output Only] The URL of the region where the operation + resides. Only applicable when performing regional + operations. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + start_time (str): + [Output Only] The time that this operation was started by + the server. This value is in RFC3339 text format. + + This field is a member of `oneof`_ ``_start_time``. + status (google.cloud.compute_v1.types.Operation.Status): + [Output Only] The status of the operation, which can be one + of the following: ``PENDING``, ``RUNNING``, or ``DONE``. + + This field is a member of `oneof`_ ``_status``. + status_message (str): + [Output Only] An optional textual description of the current + status of the operation. + + This field is a member of `oneof`_ ``_status_message``. + target_id (int): + [Output Only] The unique target ID, which identifies a + specific incarnation of the target resource. + + This field is a member of `oneof`_ ``_target_id``. + target_link (str): + [Output Only] The URL of the resource that the operation + modifies. For operations related to creating a snapshot, + this points to the persistent disk that the snapshot was + created from. + + This field is a member of `oneof`_ ``_target_link``. 
+ user (str): + [Output Only] User who requested the operation, for example: + ``user@example.com``. + + This field is a member of `oneof`_ ``_user``. + warnings (MutableSequence[google.cloud.compute_v1.types.Warnings]): + [Output Only] If warning messages are generated during + processing of the operation, this field will be populated. + zone (str): + [Output Only] The URL of the zone where the operation + resides. Only applicable when performing per-zone + operations. + + This field is a member of `oneof`_ ``_zone``. + """ + class Status(proto.Enum): + r"""[Output Only] The status of the operation, which can be one of the + following: ``PENDING``, ``RUNNING``, or ``DONE``. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + DONE (2104194): + No description available. + PENDING (35394935): + No description available. + RUNNING (121282975): + No description available. + """ + UNDEFINED_STATUS = 0 + DONE = 2104194 + PENDING = 35394935 + RUNNING = 121282975 + + client_operation_id: str = proto.Field( + proto.STRING, + number=297240295, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + end_time: str = proto.Field( + proto.STRING, + number=114938801, + optional=True, + ) + error: 'Error' = proto.Field( + proto.MESSAGE, + number=96784904, + optional=True, + message='Error', + ) + http_error_message: str = proto.Field( + proto.STRING, + number=202521945, + optional=True, + ) + http_error_status_code: int = proto.Field( + proto.INT32, + number=312345196, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + insert_time: str = proto.Field( + proto.STRING, + number=433722515, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + 
number=3373707, + optional=True, + ) + operation_group_id: str = proto.Field( + proto.STRING, + number=40171187, + optional=True, + ) + operation_type: str = proto.Field( + proto.STRING, + number=177650450, + optional=True, + ) + progress: int = proto.Field( + proto.INT32, + number=72663597, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + status: Status = proto.Field( + proto.ENUM, + number=181260274, + optional=True, + enum=Status, + ) + status_message: str = proto.Field( + proto.STRING, + number=297428154, + optional=True, + ) + target_id: int = proto.Field( + proto.UINT64, + number=258165385, + optional=True, + ) + target_link: str = proto.Field( + proto.STRING, + number=62671336, + optional=True, + ) + user: str = proto.Field( + proto.STRING, + number=3599307, + optional=True, + ) + warnings: MutableSequence['Warnings'] = proto.RepeatedField( + proto.MESSAGE, + number=498091095, + message='Warnings', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + @property + def done(self) -> bool: + """Return True if the backing extended operation is completed, False otherwise.""" + return self.status == type(self.status).DONE + + +class OperationAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.OperationsScopedList]): + [Output Only] A map of scoped operation lists. + kind (str): + [Output Only] Type of resource. 
Always + ``compute#operationAggregatedList`` for aggregated lists of + operations. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than ``maxResults``, use the ``nextPageToken`` as a + value for the query parameter ``pageToken`` in the next list + request. Subsequent list requests will have their own + ``nextPageToken`` to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'OperationsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='OperationsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class OperationList(proto.Message): + r"""Contains a list of Operation resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] The unique identifier for the resource. 
This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Operation]): + [Output Only] A list of Operation resources. + kind (str): + [Output Only] Type of resource. Always + ``compute#operations`` for Operations resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than ``maxResults``, use the ``nextPageToken`` as a + value for the query parameter ``pageToken`` in the next list + request. Subsequent list requests will have their own + ``nextPageToken`` to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Operation'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Operation', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class OperationsScopedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + operations (MutableSequence[google.cloud.compute_v1.types.Operation]): + [Output Only] A list of operations contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of operations when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + operations: MutableSequence['Operation'] = proto.RepeatedField( + proto.MESSAGE, + number=4184044, + message='Operation', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class OutlierDetection(proto.Message): + r"""Settings controlling the eviction of unhealthy hosts from the + load balancing pool for the backend service. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + base_ejection_time (google.cloud.compute_v1.types.Duration): + The base time that a backend endpoint is + ejected for. Defaults to 30000ms or 30s. After a + backend endpoint is returned back to the load + balancing pool, it can be ejected again in + another ejection analysis. Thus, the total + ejection time is equal to the base ejection time + multiplied by the number of times the backend + endpoint has been ejected. Defaults to 30000ms + or 30s. + + This field is a member of `oneof`_ ``_base_ejection_time``. + consecutive_errors (int): + Number of consecutive errors before a backend + endpoint is ejected from the load balancing + pool. When the backend endpoint is accessed over + HTTP, a 5xx return code qualifies as an error. + Defaults to 5. + + This field is a member of `oneof`_ ``_consecutive_errors``. 
+ consecutive_gateway_failure (int): + The number of consecutive gateway failures + (502, 503, 504 status or connection errors that + are mapped to one of those status codes) before + a consecutive gateway failure ejection occurs. + Defaults to 3. + + This field is a member of `oneof`_ ``_consecutive_gateway_failure``. + enforcing_consecutive_errors (int): + The percentage chance that a backend endpoint + will be ejected when an outlier status is + detected through consecutive 5xx. This setting + can be used to disable ejection or to ramp it up + slowly. Defaults to 0. + + This field is a member of `oneof`_ ``_enforcing_consecutive_errors``. + enforcing_consecutive_gateway_failure (int): + The percentage chance that a backend endpoint + will be ejected when an outlier status is + detected through consecutive gateway failures. + This setting can be used to disable ejection or + to ramp it up slowly. Defaults to 100. + + This field is a member of `oneof`_ ``_enforcing_consecutive_gateway_failure``. + enforcing_success_rate (int): + The percentage chance that a backend endpoint + will be ejected when an outlier status is + detected through success rate statistics. This + setting can be used to disable ejection or to + ramp it up slowly. Defaults to 100. Not + supported when the backend service uses + Serverless NEG. + + This field is a member of `oneof`_ ``_enforcing_success_rate``. + interval (google.cloud.compute_v1.types.Duration): + Time interval between ejection analysis + sweeps. This can result in both new ejections + and backend endpoints being returned to service. + The interval is equal to the number of seconds + as defined in outlierDetection.interval.seconds + plus the number of nanoseconds as defined in + outlierDetection.interval.nanos. Defaults to 1 + second. + + This field is a member of `oneof`_ ``_interval``. 
+ max_ejection_percent (int): + Maximum percentage of backend endpoints in + the load balancing pool for the backend service + that can be ejected if the ejection conditions + are met. Defaults to 50%. + + This field is a member of `oneof`_ ``_max_ejection_percent``. + success_rate_minimum_hosts (int): + The number of backend endpoints in the load + balancing pool that must have enough request + volume to detect success rate outliers. If the + number of backend endpoints is fewer than this + setting, outlier detection via success rate + statistics is not performed for any backend + endpoint in the load balancing pool. Defaults to + 5. Not supported when the backend service uses + Serverless NEG. + + This field is a member of `oneof`_ ``_success_rate_minimum_hosts``. + success_rate_request_volume (int): + The minimum number of total requests that + must be collected in one interval (as defined by + the interval duration above) to include this + backend endpoint in success rate based outlier + detection. If the volume is lower than this + setting, outlier detection via success rate + statistics is not performed for that backend + endpoint. Defaults to 100. Not supported when + the backend service uses Serverless NEG. + + This field is a member of `oneof`_ ``_success_rate_request_volume``. + success_rate_stdev_factor (int): + This factor is used to determine the ejection threshold for + success rate outlier ejection. The ejection threshold is the + difference between the mean success rate, and the product of + this factor and the standard deviation of the mean success + rate: mean - (stdev \* successRateStdevFactor). This factor + is divided by a thousand to get a double. That is, if the + desired factor is 1.9, the runtime value should be 1900. + Defaults to 1900. Not supported when the backend service + uses Serverless NEG. + + This field is a member of `oneof`_ ``_success_rate_stdev_factor``. 
+ """ + + base_ejection_time: 'Duration' = proto.Field( + proto.MESSAGE, + number=80997255, + optional=True, + message='Duration', + ) + consecutive_errors: int = proto.Field( + proto.INT32, + number=387193248, + optional=True, + ) + consecutive_gateway_failure: int = proto.Field( + proto.INT32, + number=417504250, + optional=True, + ) + enforcing_consecutive_errors: int = proto.Field( + proto.INT32, + number=213133760, + optional=True, + ) + enforcing_consecutive_gateway_failure: int = proto.Field( + proto.INT32, + number=394440666, + optional=True, + ) + enforcing_success_rate: int = proto.Field( + proto.INT32, + number=194508732, + optional=True, + ) + interval: 'Duration' = proto.Field( + proto.MESSAGE, + number=33547461, + optional=True, + message='Duration', + ) + max_ejection_percent: int = proto.Field( + proto.INT32, + number=18436888, + optional=True, + ) + success_rate_minimum_hosts: int = proto.Field( + proto.INT32, + number=525766903, + optional=True, + ) + success_rate_request_volume: int = proto.Field( + proto.INT32, + number=281425357, + optional=True, + ) + success_rate_stdev_factor: int = proto.Field( + proto.INT32, + number=174735773, + optional=True, + ) + + +class PacketIntervals(proto.Message): + r"""Next free: 7 + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + avg_ms (int): + Average observed inter-packet interval in + milliseconds. + + This field is a member of `oneof`_ ``_avg_ms``. + duration (str): + From how long ago in the past these intervals + were observed. Check the Duration enum for the + list of possible values. + + This field is a member of `oneof`_ ``_duration``. + max_ms (int): + Maximum observed inter-packet interval in + milliseconds. + + This field is a member of `oneof`_ ``_max_ms``. + min_ms (int): + Minimum observed inter-packet interval in + milliseconds. + + This field is a member of `oneof`_ ``_min_ms``. 
+ num_intervals (int): + Number of inter-packet intervals from which + these statistics were derived. + + This field is a member of `oneof`_ ``_num_intervals``. + type_ (str): + The type of packets for which inter-packet + intervals were computed. Check the Type enum for + the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Duration(proto.Enum): + r"""From how long ago in the past these intervals were observed. + + Values: + UNDEFINED_DURATION (0): + A value indicating that the enum field is not + set. + DURATION_UNSPECIFIED (529071340): + No description available. + HOUR (2223588): + No description available. + MAX (76100): + From BfdSession object creation time. + MINUTE (126786068): + No description available. + """ + UNDEFINED_DURATION = 0 + DURATION_UNSPECIFIED = 529071340 + HOUR = 2223588 + MAX = 76100 + MINUTE = 126786068 + + class Type(proto.Enum): + r"""The type of packets for which inter-packet intervals were + computed. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + LOOPBACK (356174219): + Only applies to Echo packets. This shows the + intervals between sending and receiving the same + packet. + RECEIVE (189660867): + Intervals between received packets. + TRANSMIT (452903600): + Intervals between transmitted packets. + TYPE_UNSPECIFIED (437714322): + No description available. 
+ """ + UNDEFINED_TYPE = 0 + LOOPBACK = 356174219 + RECEIVE = 189660867 + TRANSMIT = 452903600 + TYPE_UNSPECIFIED = 437714322 + + avg_ms: int = proto.Field( + proto.INT64, + number=204811827, + optional=True, + ) + duration: str = proto.Field( + proto.STRING, + number=155471252, + optional=True, + ) + max_ms: int = proto.Field( + proto.INT64, + number=529474145, + optional=True, + ) + min_ms: int = proto.Field( + proto.INT64, + number=536564403, + optional=True, + ) + num_intervals: int = proto.Field( + proto.INT64, + number=186329813, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class PacketMirroring(proto.Message): + r"""Represents a Packet Mirroring resource. Packet Mirroring + clones the traffic of specified instances in your Virtual + Private Cloud (VPC) network and forwards it to a collector + destination, such as an instance group of an internal TCP/UDP + load balancer, for analysis or examination. For more information + about setting up Packet Mirroring, see Using Packet Mirroring. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + collector_ilb (google.cloud.compute_v1.types.PacketMirroringForwardingRuleInfo): + The Forwarding Rule resource of type + loadBalancingScheme=INTERNAL that will be used + as collector for mirrored traffic. The specified + forwarding rule must have isMirroringCollector + set to true. + + This field is a member of `oneof`_ ``_collector_ilb``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + enable (str): + Indicates whether or not this packet + mirroring takes effect. 
If set to FALSE, this + packet mirroring policy will not be enforced on + the network. The default is TRUE. Check the + Enable enum for the list of possible values. + + This field is a member of `oneof`_ ``_enable``. + filter (google.cloud.compute_v1.types.PacketMirroringFilter): + Filter for mirrored traffic. If unspecified, + all traffic is mirrored. + + This field is a member of `oneof`_ ``_filter``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#packetMirroring for packet mirrorings. + + This field is a member of `oneof`_ ``_kind``. + mirrored_resources (google.cloud.compute_v1.types.PacketMirroringMirroredResourceInfo): + PacketMirroring mirroredResourceInfos. + MirroredResourceInfo specifies a set of mirrored + VM instances, subnetworks and/or tags for which + traffic from/to all VM instances will be + mirrored. + + This field is a member of `oneof`_ ``_mirrored_resources``. + name (str): + Name of the resource; provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network (google.cloud.compute_v1.types.PacketMirroringNetworkInfo): + Specifies the mirrored VPC network. Only + packets in this network will be mirrored. All + mirrored VMs should have a NIC in the given + network. All mirrored subnetworks should belong + to the given network. + + This field is a member of `oneof`_ ``_network``. + priority (int): + The priority of applying this configuration. 
+ Priority is used to break ties in cases where + there is more than one matching rule. In the + case of two rules that apply for a given + Instance, the one with the lowest-numbered + priority value wins. Default value is 1000. + Valid range is 0 through 65535. + + This field is a member of `oneof`_ ``_priority``. + region (str): + [Output Only] URI of the region where the packetMirroring + resides. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + """ + class Enable(proto.Enum): + r"""Indicates whether or not this packet mirroring takes effect. + If set to FALSE, this packet mirroring policy will not be + enforced on the network. The default is TRUE. + + Values: + UNDEFINED_ENABLE (0): + A value indicating that the enum field is not + set. + FALSE (66658563): + No description available. + TRUE (2583950): + No description available. + """ + UNDEFINED_ENABLE = 0 + FALSE = 66658563 + TRUE = 2583950 + + collector_ilb: 'PacketMirroringForwardingRuleInfo' = proto.Field( + proto.MESSAGE, + number=426607853, + optional=True, + message='PacketMirroringForwardingRuleInfo', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + enable: str = proto.Field( + proto.STRING, + number=311764355, + optional=True, + ) + filter: 'PacketMirroringFilter' = proto.Field( + proto.MESSAGE, + number=336120696, + optional=True, + message='PacketMirroringFilter', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + mirrored_resources: 'PacketMirroringMirroredResourceInfo' = proto.Field( + proto.MESSAGE, + number=124817348, + optional=True, + message='PacketMirroringMirroredResourceInfo', + ) + name: str = 
proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: 'PacketMirroringNetworkInfo' = proto.Field( + proto.MESSAGE, + number=232872494, + optional=True, + message='PacketMirroringNetworkInfo', + ) + priority: int = proto.Field( + proto.UINT32, + number=445151652, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + + +class PacketMirroringAggregatedList(proto.Message): + r"""Contains a list of packetMirrorings. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.PacketMirroringsScopedList]): + A list of PacketMirroring resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'PacketMirroringsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='PacketMirroringsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class PacketMirroringFilter(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + I_p_protocols (MutableSequence[str]): + Protocols that apply as filter on mirrored + traffic. If no protocols are specified, all + traffic that matches the specified CIDR ranges + is mirrored. If neither cidrRanges nor + IPProtocols is specified, all traffic is + mirrored. + cidr_ranges (MutableSequence[str]): + IP CIDR ranges that apply as filter on the + source (ingress) or destination (egress) IP in + the IP header. Only IPv4 is supported. If no + ranges are specified, all traffic that matches + the specified IPProtocols is mirrored. If + neither cidrRanges nor IPProtocols is specified, + all traffic is mirrored. + direction (str): + Direction of traffic to mirror, either + INGRESS, EGRESS, or BOTH. The default is BOTH. + Check the Direction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_direction``. + """ + class Direction(proto.Enum): + r"""Direction of traffic to mirror, either INGRESS, EGRESS, or + BOTH. The default is BOTH. 
+ + Values: + UNDEFINED_DIRECTION (0): + A value indicating that the enum field is not + set. + BOTH (2044801): + Default, both directions are mirrored. + EGRESS (432880501): + Only egress traffic is mirrored. + INGRESS (516931221): + Only ingress traffic is mirrored. + """ + UNDEFINED_DIRECTION = 0 + BOTH = 2044801 + EGRESS = 432880501 + INGRESS = 516931221 + + I_p_protocols: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=98544854, + ) + cidr_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=487901697, + ) + direction: str = proto.Field( + proto.STRING, + number=111150975, + optional=True, + ) + + +class PacketMirroringForwardingRuleInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + canonical_url (str): + [Output Only] Unique identifier for the forwarding rule; + defined by the server. + + This field is a member of `oneof`_ ``_canonical_url``. + url (str): + Resource URL to the forwarding rule + representing the ILB configured as destination + of the mirrored traffic. + + This field is a member of `oneof`_ ``_url``. + """ + + canonical_url: str = proto.Field( + proto.STRING, + number=512294820, + optional=True, + ) + url: str = proto.Field( + proto.STRING, + number=116079, + optional=True, + ) + + +class PacketMirroringList(proto.Message): + r"""Contains a list of PacketMirroring resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.PacketMirroring]): + A list of PacketMirroring resources. + kind (str): + [Output Only] Type of resource. Always + compute#packetMirroring for packetMirrorings. + + This field is a member of `oneof`_ ``_kind``. 
+ next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['PacketMirroring'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='PacketMirroring', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class PacketMirroringMirroredResourceInfo(proto.Message): + r""" + + Attributes: + instances (MutableSequence[google.cloud.compute_v1.types.PacketMirroringMirroredResourceInfoInstanceInfo]): + A set of virtual machine instances that are + being mirrored. They must live in zones + contained in the same region as this + packetMirroring. Note that this config will + apply only to those network interfaces of the + Instances that belong to the network specified + in this packetMirroring. You may specify a + maximum of 50 Instances. 
+ subnetworks (MutableSequence[google.cloud.compute_v1.types.PacketMirroringMirroredResourceInfoSubnetInfo]): + A set of subnetworks for which traffic + from/to all VM instances will be mirrored. They + must live in the same region as this + packetMirroring. You may specify a maximum of 5 + subnetworks. + tags (MutableSequence[str]): + A set of mirrored tags. Traffic from/to all + VM instances that have one or more of these tags + will be mirrored. + """ + + instances: MutableSequence['PacketMirroringMirroredResourceInfoInstanceInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=29097598, + message='PacketMirroringMirroredResourceInfoInstanceInfo', + ) + subnetworks: MutableSequence['PacketMirroringMirroredResourceInfoSubnetInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=415853125, + message='PacketMirroringMirroredResourceInfoSubnetInfo', + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3552281, + ) + + +class PacketMirroringMirroredResourceInfoInstanceInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + canonical_url (str): + [Output Only] Unique identifier for the instance; defined by + the server. + + This field is a member of `oneof`_ ``_canonical_url``. + url (str): + Resource URL to the virtual machine instance + which is being mirrored. + + This field is a member of `oneof`_ ``_url``. + """ + + canonical_url: str = proto.Field( + proto.STRING, + number=512294820, + optional=True, + ) + url: str = proto.Field( + proto.STRING, + number=116079, + optional=True, + ) + + +class PacketMirroringMirroredResourceInfoSubnetInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + canonical_url (str): + [Output Only] Unique identifier for the subnetwork; defined + by the server. 
+ + This field is a member of `oneof`_ ``_canonical_url``. + url (str): + Resource URL to the subnetwork for which + traffic from/to all VM instances will be + mirrored. + + This field is a member of `oneof`_ ``_url``. + """ + + canonical_url: str = proto.Field( + proto.STRING, + number=512294820, + optional=True, + ) + url: str = proto.Field( + proto.STRING, + number=116079, + optional=True, + ) + + +class PacketMirroringNetworkInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + canonical_url (str): + [Output Only] Unique identifier for the network; defined by + the server. + + This field is a member of `oneof`_ ``_canonical_url``. + url (str): + URL of the network resource. + + This field is a member of `oneof`_ ``_url``. + """ + + canonical_url: str = proto.Field( + proto.STRING, + number=512294820, + optional=True, + ) + url: str = proto.Field( + proto.STRING, + number=116079, + optional=True, + ) + + +class PacketMirroringsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + packet_mirrorings (MutableSequence[google.cloud.compute_v1.types.PacketMirroring]): + A list of packetMirrorings contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of packetMirrorings when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + packet_mirrorings: MutableSequence['PacketMirroring'] = proto.RepeatedField( + proto.MESSAGE, + number=154615079, + message='PacketMirroring', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class PatchAutoscalerRequest(proto.Message): + r"""A request message for Autoscalers.Patch. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler (str): + Name of the autoscaler to patch. + + This field is a member of `oneof`_ ``_autoscaler``. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + Name of the zone for this request. + """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + optional=True, + ) + autoscaler_resource: 'Autoscaler' = proto.Field( + proto.MESSAGE, + number=207616118, + message='Autoscaler', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class PatchBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket (str): + Name of the BackendBucket resource to patch. 
+ backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + backend_bucket_resource: 'BackendBucket' = proto.Field( + proto.MESSAGE, + number=380757784, + message='BackendBucket', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to patch. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + backend_service_resource: 'BackendService' = proto.Field( + proto.MESSAGE, + number=347586723, + message='BackendService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_resource: 'FirewallPolicy' = proto.Field( + proto.MESSAGE, + number=495049532, + message='FirewallPolicy', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchFirewallRequest(proto.Message): + r"""A request message for Firewalls.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall (str): + Name of the firewall rule to patch. + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall: str = proto.Field( + proto.STRING, + number=511016192, + ) + firewall_resource: 'Firewall' = proto.Field( + proto.MESSAGE, + number=41425005, + message='Firewall', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchForwardingRuleRequest(proto.Message): + r"""A request message for ForwardingRules.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule (str): + Name of the ForwardingRule resource to patch. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + ) + forwarding_rule_resource: 'ForwardingRule' = proto.Field( + proto.MESSAGE, + number=301211695, + message='ForwardingRule', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchGlobalForwardingRuleRequest(proto.Message): + r"""A request message for GlobalForwardingRules.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule (str): + Name of the ForwardingRule resource to patch. + forwarding_rule_resource (google.cloud.compute_v1.types.ForwardingRule): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + ) + forwarding_rule_resource: 'ForwardingRule' = proto.Field( + proto.MESSAGE, + number=301211695, + message='ForwardingRule', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchGlobalPublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for GlobalPublicDelegatedPrefixes.Patch. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix resource to + patch. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix: str = proto.Field( + proto.STRING, + number=204238440, + ) + public_delegated_prefix_resource: 'PublicDelegatedPrefix' = proto.Field( + proto.MESSAGE, + number=47594501, + message='PublicDelegatedPrefix', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchHealthCheckRequest(proto.Message): + r"""A request message for HealthChecks.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + Name of the HealthCheck resource to patch. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + health_check_resource: 'HealthCheck' = proto.Field( + proto.MESSAGE, + number=201925032, + message='HealthCheck', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchImageRequest(proto.Message): + r"""A request message for Images.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + image (str): + Name of the image resource to patch. + image_resource (google.cloud.compute_v1.types.Image): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + image: str = proto.Field( + proto.STRING, + number=100313435, + ) + image_resource: 'Image' = proto.Field( + proto.MESSAGE, + number=371171954, + message='Image', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the instance group manager. + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where you want to create + the managed instance group. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_manager_resource: 'InstanceGroupManager' = proto.Field( + proto.MESSAGE, + number=261063946, + message='InstanceGroupManager', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class PatchInterconnectAttachmentRequest(proto.Message): + r"""A request message for InterconnectAttachments.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_attachment (str): + Name of the interconnect attachment to patch. + interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + interconnect_attachment: str = proto.Field( + proto.STRING, + number=308135284, + ) + interconnect_attachment_resource: 'InterconnectAttachment' = proto.Field( + proto.MESSAGE, + number=212341369, + message='InterconnectAttachment', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchInterconnectRequest(proto.Message): + r"""A request message for Interconnects.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect (str): + Name of the interconnect to update. + interconnect_resource (google.cloud.compute_v1.types.Interconnect): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + interconnect: str = proto.Field( + proto.STRING, + number=224601230, + ) + interconnect_resource: 'Interconnect' = proto.Field( + proto.MESSAGE, + number=397611167, + message='Interconnect', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchNetworkEdgeSecurityServiceRequest(proto.Message): + r"""A request message for NetworkEdgeSecurityServices.Patch. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_edge_security_service (str): + Name of the network edge security service to + update. + network_edge_security_service_resource (google.cloud.compute_v1.types.NetworkEdgeSecurityService): + The body resource for this request + paths (str): + + This field is a member of `oneof`_ ``_paths``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + update_mask (str): + Indicates fields to be updated as part of + this request. + + This field is a member of `oneof`_ ``_update_mask``. 
+ """ + + network_edge_security_service: str = proto.Field( + proto.STRING, + number=157011879, + ) + network_edge_security_service_resource: 'NetworkEdgeSecurityService' = proto.Field( + proto.MESSAGE, + number=477548966, + message='NetworkEdgeSecurityService', + ) + paths: str = proto.Field( + proto.STRING, + number=106438894, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + + +class PatchNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_resource: 'FirewallPolicy' = proto.Field( + proto.MESSAGE, + number=495049532, + message='FirewallPolicy', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchNetworkRequest(proto.Message): + r"""A request message for Networks.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Name of the network to update. + network_resource (google.cloud.compute_v1.types.Network): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + network_resource: 'Network' = proto.Field( + proto.MESSAGE, + number=122105599, + message='Network', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_group (str): + Name of the NodeGroup resource to update. + node_group_resource (google.cloud.compute_v1.types.NodeGroup): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + node_group_resource: 'NodeGroup' = proto.Field( + proto.MESSAGE, + number=505321899, + message='NodeGroup', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class PatchPacketMirroringRequest(proto.Message): + r"""A request message for PacketMirrorings.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + packet_mirroring (str): + Name of the PacketMirroring resource to + patch. + packet_mirroring_resource (google.cloud.compute_v1.types.PacketMirroring): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + packet_mirroring: str = proto.Field( + proto.STRING, + number=22305996, + ) + packet_mirroring_resource: 'PacketMirroring' = proto.Field( + proto.MESSAGE, + number=493501985, + message='PacketMirroring', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchPerInstanceConfigsInstanceGroupManagerRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.PatchPerInstanceConfigs. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + instance_group_managers_patch_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersPatchPerInstanceConfigsReq): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_patch_per_instance_configs_req_resource: 'InstanceGroupManagersPatchPerInstanceConfigsReq' = proto.Field( + proto.MESSAGE, + number=356650495, + message='InstanceGroupManagersPatchPerInstanceConfigsReq', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class PatchPerInstanceConfigsRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.PatchPerInstanceConfigs. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request, + should conform to RFC1035. + region_instance_group_manager_patch_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerPatchInstanceConfigReq): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_manager_patch_instance_config_req_resource: 'RegionInstanceGroupManagerPatchInstanceConfigReq' = proto.Field( + proto.MESSAGE, + number=197682890, + message='RegionInstanceGroupManagerPatchInstanceConfigReq', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchPublicAdvertisedPrefixeRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_advertised_prefix (str): + Name of the PublicAdvertisedPrefix resource + to patch. + public_advertised_prefix_resource (google.cloud.compute_v1.types.PublicAdvertisedPrefix): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_advertised_prefix: str = proto.Field( + proto.STRING, + number=101874590, + ) + public_advertised_prefix_resource: 'PublicAdvertisedPrefix' = proto.Field( + proto.MESSAGE, + number=233614223, + message='PublicAdvertisedPrefix', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchPublicDelegatedPrefixeRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + public_delegated_prefix (str): + Name of the PublicDelegatedPrefix resource to + patch. + public_delegated_prefix_resource (google.cloud.compute_v1.types.PublicDelegatedPrefix): + The body resource for this request + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + public_delegated_prefix: str = proto.Field( + proto.STRING, + number=204238440, + ) + public_delegated_prefix_resource: 'PublicDelegatedPrefix' = proto.Field( + proto.MESSAGE, + number=47594501, + message='PublicDelegatedPrefix', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionAutoscalerRequest(proto.Message): + r"""A request message for RegionAutoscalers.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler (str): + Name of the autoscaler to patch. + + This field is a member of `oneof`_ ``_autoscaler``. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + optional=True, + ) + autoscaler_resource: 'Autoscaler' = proto.Field( + proto.MESSAGE, + number=207616118, + message='Autoscaler', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to patch. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + backend_service_resource: 'BackendService' = proto.Field( + proto.MESSAGE, + number=347586723, + message='BackendService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionHealthCheckRequest(proto.Message): + r"""A request message for RegionHealthChecks.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + Name of the HealthCheck resource to patch. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + health_check_resource: 'HealthCheck' = proto.Field( + proto.MESSAGE, + number=201925032, + message='HealthCheck', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionHealthCheckServiceRequest(proto.Message): + r"""A request message for RegionHealthCheckServices.Patch. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check_service (str): + Name of the HealthCheckService to update. The + name must be 1-63 characters long, and comply + with RFC1035. + health_check_service_resource (google.cloud.compute_v1.types.HealthCheckService): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + health_check_service: str = proto.Field( + proto.STRING, + number=408374747, + ) + health_check_service_resource: 'HealthCheckService' = proto.Field( + proto.MESSAGE, + number=477367794, + message='HealthCheckService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.Patch. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the instance group manager. + instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_manager_resource: 'InstanceGroupManager' = proto.Field( + proto.MESSAGE, + number=261063946, + message='InstanceGroupManager', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.Patch. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_resource: 'FirewallPolicy' = proto.Field( + proto.MESSAGE, + number=495049532, + message='FirewallPolicy', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + security_policy (str): + Name of the security policy to update. 
+ security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + security_policy_resource: 'SecurityPolicy' = proto.Field( + proto.MESSAGE, + number=216159612, + message='SecurityPolicy', + ) + + +class PatchRegionSslPolicyRequest(proto.Message): + r"""A request message for RegionSslPolicies.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_policy (str): + Name of the SSL policy to update. The name + must be 1-63 characters long, and comply with + RFC1035. 
+ ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + ) + ssl_policy_resource: 'SslPolicy' = proto.Field( + proto.MESSAGE, + number=274891848, + message='SslPolicy', + ) + + +class PatchRegionTargetHttpsProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpsProxies.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to + patch. 
+ target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + target_https_proxy_resource: 'TargetHttpsProxy' = proto.Field( + proto.MESSAGE, + number=433657473, + message='TargetHttpsProxy', + ) + + +class PatchRegionUrlMapRequest(proto.Message): + r"""A request message for RegionUrlMaps.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + begin_interface: MixerMutationRequestBuilder Request ID to + support idempotency. + + This field is a member of `oneof`_ ``_request_id``. + url_map (str): + Name of the UrlMap resource to patch. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + url_map_resource: 'UrlMap' = proto.Field( + proto.MESSAGE, + number=168675425, + message='UrlMap', + ) + + +class PatchResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. 
+ region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource_policy (str): + Id of the resource policy to patch. + resource_policy_resource (google.cloud.compute_v1.types.ResourcePolicy): + The body resource for this request + update_mask (str): + update_mask indicates fields to be updated as part of this + request. + + This field is a member of `oneof`_ ``_update_mask``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource_policy: str = proto.Field( + proto.STRING, + number=159240835, + ) + resource_policy_resource: 'ResourcePolicy' = proto.Field( + proto.MESSAGE, + number=76826186, + message='ResourcePolicy', + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + + +class PatchRouterRequest(proto.Message): + r"""A request message for Routers.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. 
+ region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + router (str): + Name of the Router resource to patch. + router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) + router_resource: 'Router' = proto.Field( + proto.MESSAGE, + number=155222084, + message='Router', + ) + + +class PatchRuleFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.PatchRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + priority (int): + The priority of the rule to patch. + + This field is a member of `oneof`_ ``_priority``. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_rule_resource: 'FirewallPolicyRule' = proto.Field( + proto.MESSAGE, + number=250523523, + message='FirewallPolicyRule', + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRuleNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.PatchRule. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + priority (int): + The priority of the rule to patch. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_rule_resource: 'FirewallPolicyRule' = proto.Field( + proto.MESSAGE, + number=250523523, + message='FirewallPolicyRule', + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRuleRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.PatchRule. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_rule_resource (google.cloud.compute_v1.types.FirewallPolicyRule): + The body resource for this request + priority (int): + The priority of the rule to patch. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_rule_resource: 'FirewallPolicyRule' = proto.Field( + proto.MESSAGE, + number=250523523, + message='FirewallPolicyRule', + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRuleSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.PatchRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + priority (int): + The priority of the rule to patch. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + security_policy (str): + Name of the security policy to update. + security_policy_rule_resource (google.cloud.compute_v1.types.SecurityPolicyRule): + The body resource for this request + validate_only (bool): + If true, the request will not be committed. 
+ + This field is a member of `oneof`_ ``_validate_only``. + """ + + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + security_policy_rule_resource: 'SecurityPolicyRule' = proto.Field( + proto.MESSAGE, + number=402693443, + message='SecurityPolicyRule', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=242744629, + optional=True, + ) + + +class PatchSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + security_policy (str): + Name of the security policy to update. 
+ security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + security_policy_resource: 'SecurityPolicy' = proto.Field( + proto.MESSAGE, + number=216159612, + message='SecurityPolicy', + ) + + +class PatchServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region scoping this request and should + conform to RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + service_attachment (str): + The resource id of the ServiceAttachment to + patch. It should conform to RFC1035 resource + name or be a string form on an unsigned long + number. 
+ service_attachment_resource (google.cloud.compute_v1.types.ServiceAttachment): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + service_attachment: str = proto.Field( + proto.STRING, + number=338957549, + ) + service_attachment_resource: 'ServiceAttachment' = proto.Field( + proto.MESSAGE, + number=472980256, + message='ServiceAttachment', + ) + + +class PatchSslPolicyRequest(proto.Message): + r"""A request message for SslPolicies.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_policy (str): + Name of the SSL policy to update. The name + must be 1-63 characters long, and comply with + RFC1035. 
+ ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + ) + ssl_policy_resource: 'SslPolicy' = proto.Field( + proto.MESSAGE, + number=274891848, + message='SslPolicy', + ) + + +class PatchSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + drain_timeout_seconds (int): + The drain timeout specifies the upper bound in seconds on + the amount of time allowed to drain connections from the + current ACTIVE subnetwork to the current BACKUP subnetwork. + The drain timeout is only applicable when the following + conditions are true: - the subnetwork being patched has + purpose = INTERNAL_HTTPS_LOAD_BALANCER - the subnetwork + being patched has role = BACKUP - the patch request is + setting the role to ACTIVE. Note that after this patch + operation the roles of the ACTIVE and BACKUP subnetworks + will be swapped. + + This field is a member of `oneof`_ ``_drain_timeout_seconds``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. 
This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + subnetwork (str): + Name of the Subnetwork resource to patch. + subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): + The body resource for this request + """ + + drain_timeout_seconds: int = proto.Field( + proto.INT32, + number=357707098, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + ) + subnetwork_resource: 'Subnetwork' = proto.Field( + proto.MESSAGE, + number=42233151, + message='Subnetwork', + ) + + +class PatchTargetGrpcProxyRequest(proto.Message): + r"""A request message for TargetGrpcProxies.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. + target_grpc_proxy (str): + Name of the TargetGrpcProxy resource to + patch. + target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_grpc_proxy: str = proto.Field( + proto.STRING, + number=5020283, + ) + target_grpc_proxy_resource: 'TargetGrpcProxy' = proto.Field( + proto.MESSAGE, + number=328922450, + message='TargetGrpcProxy', + ) + + +class PatchTargetHttpProxyRequest(proto.Message): + r"""A request message for TargetHttpProxies.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_http_proxy (str): + Name of the TargetHttpProxy resource to + patch. 
+ target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_http_proxy: str = proto.Field( + proto.STRING, + number=206872421, + ) + target_http_proxy_resource: 'TargetHttpProxy' = proto.Field( + proto.MESSAGE, + number=24696744, + message='TargetHttpProxy', + ) + + +class PatchTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to + patch. 
+ target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + target_https_proxy_resource: 'TargetHttpsProxy' = proto.Field( + proto.MESSAGE, + number=433657473, + message='TargetHttpsProxy', + ) + + +class PatchUrlMapRequest(proto.Message): + r"""A request message for UrlMaps.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + url_map (str): + Name of the UrlMap resource to patch. 
+ url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + url_map_resource: 'UrlMap' = proto.Field( + proto.MESSAGE, + number=168675425, + message='UrlMap', + ) + + +class PathMatcher(proto.Message): + r"""A matcher for the path portion of the URL. The BackendService + from the longest-matched rule will serve the URL. If no rule was + matched, the default service is used. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + default_route_action (google.cloud.compute_v1.types.HttpRouteAction): + defaultRouteAction takes effect when none of + the pathRules or routeRules match. The load + balancer performs advanced routing actions, such + as URL rewrites and header transformations, + before forwarding the request to the selected + backend. If defaultRouteAction specifies any + weightedBackendServices, defaultService must not + be set. Conversely if defaultService is set, + defaultRouteAction cannot contain any + weightedBackendServices. Only one of + defaultRouteAction or defaultUrlRedirect must be + set. URL maps for Classic external HTTP(S) load + balancers only support the urlRewrite action + within a path matcher's defaultRouteAction. + + This field is a member of `oneof`_ ``_default_route_action``. + default_service (str): + The full or partial URL to the BackendService resource. This + URL is used if none of the pathRules or routeRules defined + by this PathMatcher are matched. 
For example, the following + are all valid URLs to a BackendService resource: - + https://www.googleapis.com/compute/v1/projects/project + /global/backendServices/backendService - + compute/v1/projects/project/global/backendServices/backendService + - global/backendServices/backendService If + defaultRouteAction is also specified, advanced routing + actions, such as URL rewrites, take effect before sending + the request to the backend. However, if defaultService is + specified, defaultRouteAction cannot contain any + weightedBackendServices. Conversely, if defaultRouteAction + specifies any weightedBackendServices, defaultService must + not be specified. Only one of defaultService, + defaultUrlRedirect , or + defaultRouteAction.weightedBackendService must be set. + Authorization requires one or more of the following Google + IAM permissions on the specified resource default_service: - + compute.backendBuckets.use - compute.backendServices.use + + This field is a member of `oneof`_ ``_default_service``. + default_url_redirect (google.cloud.compute_v1.types.HttpRedirectAction): + When none of the specified pathRules or + routeRules match, the request is redirected to a + URL specified by defaultUrlRedirect. If + defaultUrlRedirect is specified, defaultService + or defaultRouteAction must not be set. Not + supported when the URL map is bound to a target + gRPC proxy. + + This field is a member of `oneof`_ ``_default_url_redirect``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + header_action (google.cloud.compute_v1.types.HttpHeaderAction): + Specifies changes to request and response + headers that need to take effect for the + selected backend service. 
HeaderAction specified + here are applied after the matching + HttpRouteRule HeaderAction and before the + HeaderAction in the UrlMap HeaderAction is not + supported for load balancers that have their + loadBalancingScheme set to EXTERNAL. Not + supported when the URL map is bound to a target + gRPC proxy that has validateForProxyless field + set to true. + + This field is a member of `oneof`_ ``_header_action``. + name (str): + The name to which this PathMatcher is + referred by the HostRule. + + This field is a member of `oneof`_ ``_name``. + path_rules (MutableSequence[google.cloud.compute_v1.types.PathRule]): + The list of path rules. Use this list instead of routeRules + when routing based on simple path matching is all that's + required. The order by which path rules are specified does + not matter. Matches are always done on the + longest-path-first basis. For example: a pathRule with a + path /a/b/c/\* will match before /a/b/\* irrespective of the + order in which those paths appear in this list. Within a + given pathMatcher, only one of pathRules or routeRules must + be set. + route_rules (MutableSequence[google.cloud.compute_v1.types.HttpRouteRule]): + The list of HTTP route rules. Use this list + instead of pathRules when advanced route + matching and routing actions are desired. + routeRules are evaluated in order of priority, + from the lowest to highest number. Within a + given pathMatcher, you can set only one of + pathRules or routeRules. 
+ """ + + default_route_action: 'HttpRouteAction' = proto.Field( + proto.MESSAGE, + number=378919466, + optional=True, + message='HttpRouteAction', + ) + default_service: str = proto.Field( + proto.STRING, + number=370242231, + optional=True, + ) + default_url_redirect: 'HttpRedirectAction' = proto.Field( + proto.MESSAGE, + number=359503338, + optional=True, + message='HttpRedirectAction', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + header_action: 'HttpHeaderAction' = proto.Field( + proto.MESSAGE, + number=328077352, + optional=True, + message='HttpHeaderAction', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + path_rules: MutableSequence['PathRule'] = proto.RepeatedField( + proto.MESSAGE, + number=104439901, + message='PathRule', + ) + route_rules: MutableSequence['HttpRouteRule'] = proto.RepeatedField( + proto.MESSAGE, + number=376292225, + message='HttpRouteRule', + ) + + +class PathRule(proto.Message): + r"""A path-matching rule for a URL. If matched, will use the + specified BackendService to handle the traffic arriving at this + URL. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + paths (MutableSequence[str]): + The list of path patterns to match. Each must start with / + and the only place a \* is allowed is at the end following a + /. The string fed to the path matcher does not include any + text after the first ? or #, and those chars are not allowed + here. + route_action (google.cloud.compute_v1.types.HttpRouteAction): + In response to a matching path, the load + balancer performs advanced routing actions, such + as URL rewrites and header transformations, + before forwarding the request to the selected + backend. If routeAction specifies any + weightedBackendServices, service must not be + set. Conversely if service is set, routeAction + cannot contain any weightedBackendServices. 
Only + one of routeAction or urlRedirect must be set. + URL maps for Classic external HTTP(S) load + balancers only support the urlRewrite action + within a path rule's routeAction. + + This field is a member of `oneof`_ ``_route_action``. + service (str): + The full or partial URL of the backend + service resource to which traffic is directed if + this rule is matched. If routeAction is also + specified, advanced routing actions, such as URL + rewrites, take effect before sending the request + to the backend. However, if service is + specified, routeAction cannot contain any + weightedBackendServices. Conversely, if + routeAction specifies any + weightedBackendServices, service must not be + specified. Only one of urlRedirect, service or + routeAction.weightedBackendService must be set. + + This field is a member of `oneof`_ ``_service``. + url_redirect (google.cloud.compute_v1.types.HttpRedirectAction): + When a path pattern is matched, the request + is redirected to a URL specified by urlRedirect. + If urlRedirect is specified, service or + routeAction must not be set. Not supported when + the URL map is bound to a target gRPC proxy. + + This field is a member of `oneof`_ ``_url_redirect``. + """ + + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=106438894, + ) + route_action: 'HttpRouteAction' = proto.Field( + proto.MESSAGE, + number=424563948, + optional=True, + message='HttpRouteAction', + ) + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + url_redirect: 'HttpRedirectAction' = proto.Field( + proto.MESSAGE, + number=405147820, + optional=True, + message='HttpRedirectAction', + ) + + +class PerInstanceConfig(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fingerprint (str): + Fingerprint of this per-instance config. This + field can be used in optimistic locking. 
It is + ignored when inserting a per-instance config. An + up-to-date fingerprint must be provided in order + to update an existing per-instance configuration + or the field needs to be unset. + + This field is a member of `oneof`_ ``_fingerprint``. + name (str): + The name of a per-instance configuration and + its corresponding instance. Serves as a merge + key during UpdatePerInstanceConfigs operations, + that is, if a per-instance configuration with + the same name exists then it will be updated, + otherwise a new one will be created for the VM + instance with the same name. An attempt to + create a per-instance configconfiguration for a + VM instance that either doesn't exist or is not + part of the group will result in an error. + + This field is a member of `oneof`_ ``_name``. + preserved_state (google.cloud.compute_v1.types.PreservedState): + The intended preserved state for the given + instance. Does not contain preserved state + generated from a stateful policy. + + This field is a member of `oneof`_ ``_preserved_state``. + status (str): + The status of applying this per-instance + configuration on the corresponding managed + instance. Check the Status enum for the list of + possible values. + + This field is a member of `oneof`_ ``_status``. + """ + class Status(proto.Enum): + r"""The status of applying this per-instance configuration on the + corresponding managed instance. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + APPLYING (352003508): + The per-instance configuration is being + applied to the instance, but is not yet + effective, possibly waiting for the instance to, + for example, REFRESH. + DELETING (528602024): + The per-instance configuration deletion is + being applied on the instance, possibly waiting + for the instance to, for example, REFRESH. 
+ EFFECTIVE (244201863): + The per-instance configuration is effective + on the instance, meaning that all disks, ips and + metadata specified in this configuration are + attached or set on the instance. + NONE (2402104): + *[Default]* The default status, when no per-instance + configuration exists. + UNAPPLIED (483935140): + The per-instance configuration is set on an + instance but has not been applied yet. + UNAPPLIED_DELETION (313956873): + The per-instance configuration has been + deleted, but the deletion is not yet applied. + """ + UNDEFINED_STATUS = 0 + APPLYING = 352003508 + DELETING = 528602024 + EFFECTIVE = 244201863 + NONE = 2402104 + UNAPPLIED = 483935140 + UNAPPLIED_DELETION = 313956873 + + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + preserved_state: 'PreservedState' = proto.Field( + proto.MESSAGE, + number=2634026, + optional=True, + message='PreservedState', + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + + class Policy(proto.Message): + r"""An Identity and Access Management (IAM) policy, which specifies + access controls for Google Cloud resources. A ``Policy`` is a + collection of ``bindings``. A ``binding`` binds one or more + ``members``, or principals, to a single ``role``. Principals can be + user accounts, service accounts, Google groups, and domains (such as + G Suite). A ``role`` is a named list of permissions; each ``role`` + can be an IAM predefined role or a user-created custom role. For + some types of Google Cloud resources, a ``binding`` can also specify + a ``condition``, which is a logical expression that allows access to + a resource only if the expression evaluates to ``true``. A condition + can add constraints based on attributes of the request, the + resource, or both.
To learn which resources support conditions in + their IAM policies, see the `IAM + documentation `__. + **JSON example:** { "bindings": [ { "role": + "roles/resourcemanager.organizationAdmin", "members": [ + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { + "role": "roles/resourcemanager.organizationViewer", "members": [ + "user:eve@example.com" ], "condition": { "title": "expirable + access", "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": + "BwWWja0YfJA=", "version": 3 } **YAML example:** bindings: - + members: - user:mike@example.com - group:admins@example.com - + domain:google.com - + serviceAccount:my-project-id@appspot.gserviceaccount.com role: + roles/resourcemanager.organizationAdmin - members: - + user:eve@example.com role: roles/resourcemanager.organizationViewer + condition: title: expirable access description: Does not grant + access after Sep 2020 expression: request.time < + timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3 + For a description of IAM and its features, see the `IAM + documentation `__. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audit_configs (MutableSequence[google.cloud.compute_v1.types.AuditConfig]): + Specifies cloud audit logging configuration + for this policy. + bindings (MutableSequence[google.cloud.compute_v1.types.Binding]): + Associates a list of ``members``, or principals, with a + ``role``. Optionally, may specify a ``condition`` that + determines how and when the ``bindings`` are applied. Each + of the ``bindings`` must contain at least one principal. The + ``bindings`` in a ``Policy`` can refer to up to 1,500 + principals; up to 250 of these principals can be Google + groups. 
Each occurrence of a principal counts towards these + limits. For example, if the ``bindings`` grant 50 different + roles to ``user:alice@example.com``, and not to any other + principal, then you can add another 1,450 principals to the + ``bindings`` in the ``Policy``. + etag (str): + ``etag`` is used for optimistic concurrency control as a way + to help prevent simultaneous updates of a policy from + overwriting each other. It is strongly suggested that + systems make use of the ``etag`` in the read-modify-write + cycle to perform policy updates in order to avoid race + conditions: An ``etag`` is returned in the response to + ``getIamPolicy``, and systems are expected to put that etag + in the request to ``setIamPolicy`` to ensure that their + change will be applied to the same version of the policy. + **Important:** If you use IAM Conditions, you must include + the ``etag`` field whenever you call ``setIamPolicy``. If + you omit this field, then IAM allows you to overwrite a + version ``3`` policy with a version ``1`` policy, and all of + the conditions in the version ``3`` policy are lost. + + This field is a member of `oneof`_ ``_etag``. + iam_owned (bool): + + This field is a member of `oneof`_ ``_iam_owned``. + rules (MutableSequence[google.cloud.compute_v1.types.Rule]): + This is deprecated and has no effect. Do not + use. + version (int): + Specifies the format of the policy. Valid values are ``0``, + ``1``, and ``3``. Requests that specify an invalid value are + rejected. Any operation that affects conditional role + bindings must specify version ``3``. 
This requirement + applies to the following operations: \* Getting a policy + that includes a conditional role binding \* Adding a + conditional role binding to a policy \* Changing a + conditional role binding in a policy \* Removing any role + binding, with or without a condition, from a policy that + includes conditions **Important:** If you use IAM + Conditions, you must include the ``etag`` field whenever you + call ``setIamPolicy``. If you omit this field, then IAM + allows you to overwrite a version ``3`` policy with a + version ``1`` policy, and all of the conditions in the + version ``3`` policy are lost. If a policy does not include + any conditions, operations on that policy may specify any + valid version or leave the field unset. To learn which + resources support conditions in their IAM policies, see the + `IAM + documentation `__. + + This field is a member of `oneof`_ ``_version``. + """ + + audit_configs: MutableSequence['AuditConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=328080653, + message='AuditConfig', + ) + bindings: MutableSequence['Binding'] = proto.RepeatedField( + proto.MESSAGE, + number=403251854, + message='Binding', + ) + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + iam_owned: bool = proto.Field( + proto.BOOL, + number=450566203, + optional=True, + ) + rules: MutableSequence['Rule'] = proto.RepeatedField( + proto.MESSAGE, + number=108873975, + message='Rule', + ) + version: int = proto.Field( + proto.INT32, + number=351608024, + optional=True, + ) + + +class PreconfiguredWafSet(proto.Message): + r""" + + Attributes: + expression_sets (MutableSequence[google.cloud.compute_v1.types.WafExpressionSet]): + List of entities that are currently supported + for WAF rules. 
+ """ + + expression_sets: MutableSequence['WafExpressionSet'] = proto.RepeatedField( + proto.MESSAGE, + number=474011032, + message='WafExpressionSet', + ) + + +class PreservedState(proto.Message): + r"""Preserved state for a given instance. + + Attributes: + disks (MutableMapping[str, google.cloud.compute_v1.types.PreservedStatePreservedDisk]): + Preserved disks defined for this instance. + This map is keyed with the device names of the + disks. + metadata (MutableMapping[str, str]): + Preserved metadata defined for this instance. + """ + + disks: MutableMapping[str, 'PreservedStatePreservedDisk'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=95594102, + message='PreservedStatePreservedDisk', + ) + metadata: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=86866735, + ) + + +class PreservedStatePreservedDisk(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_delete (str): + These stateful disks will never be deleted during + autohealing, update, instance recreate operations. This flag + is used to configure if the disk should be deleted after it + is no longer used by the group, e.g. when the given instance + or the whole MIG is deleted. Note: disks attached in + READ_ONLY mode cannot be auto-deleted. Check the AutoDelete + enum for the list of possible values. + + This field is a member of `oneof`_ ``_auto_delete``. + mode (str): + The mode in which to attach this disk, either READ_WRITE or + READ_ONLY. If not specified, the default is to attach the + disk in READ_WRITE mode. Check the Mode enum for the list of + possible values. + + This field is a member of `oneof`_ ``_mode``. + source (str): + The URL of the disk resource that is stateful + and should be attached to the VM instance. + + This field is a member of `oneof`_ ``_source``. 
+ """ + class AutoDelete(proto.Enum): + r"""These stateful disks will never be deleted during autohealing, + update, instance recreate operations. This flag is used to configure + if the disk should be deleted after it is no longer used by the + group, e.g. when the given instance or the whole MIG is deleted. + Note: disks attached in READ_ONLY mode cannot be auto-deleted. + + Values: + UNDEFINED_AUTO_DELETE (0): + A value indicating that the enum field is not + set. + NEVER (74175084): + No description available. + ON_PERMANENT_INSTANCE_DELETION (95727719): + No description available. + """ + UNDEFINED_AUTO_DELETE = 0 + NEVER = 74175084 + ON_PERMANENT_INSTANCE_DELETION = 95727719 + + class Mode(proto.Enum): + r"""The mode in which to attach this disk, either READ_WRITE or + READ_ONLY. If not specified, the default is to attach the disk in + READ_WRITE mode. + + Values: + UNDEFINED_MODE (0): + A value indicating that the enum field is not + set. + READ_ONLY (91950261): + Attaches this disk in read-only mode. Multiple VM instances + can use a disk in READ_ONLY mode at a time. + READ_WRITE (173607894): + *[Default]* Attaches this disk in READ_WRITE mode. Only one + VM instance at a time can be attached to a disk in + READ_WRITE mode. + """ + UNDEFINED_MODE = 0 + READ_ONLY = 91950261 + READ_WRITE = 173607894 + + auto_delete: str = proto.Field( + proto.STRING, + number=464761403, + optional=True, + ) + mode: str = proto.Field( + proto.STRING, + number=3357091, + optional=True, + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + + +class PreviewRouterRequest(proto.Message): + r"""A request message for Routers.Preview. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + router (str): + Name of the Router resource to query. 
+ router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) + router_resource: 'Router' = proto.Field( + proto.MESSAGE, + number=155222084, + message='Router', + ) + + +class Project(proto.Message): + r"""Represents a Project resource. A project is used to organize + resources in a Google Cloud Platform environment. For more + information, read about the Resource Hierarchy. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + common_instance_metadata (google.cloud.compute_v1.types.Metadata): + Metadata key/value pairs available to all + instances contained in this project. See Custom + metadata for more information. + + This field is a member of `oneof`_ ``_common_instance_metadata``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + default_network_tier (str): + This signifies the default network tier used + for configuring resources of the project and can + only take the following values: PREMIUM, + STANDARD. Initially the default network tier is + PREMIUM. Check the DefaultNetworkTier enum for + the list of possible values. + + This field is a member of `oneof`_ ``_default_network_tier``. + default_service_account (str): + [Output Only] Default service account used by VMs running in + this project. + + This field is a member of `oneof`_ ``_default_service_account``. + description (str): + An optional textual description of the + resource. + + This field is a member of `oneof`_ ``_description``. + enabled_features (MutableSequence[str]): + Restricted features enabled for use on this + project. 
+ id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. This is *not* the + project ID, and is just a unique ID used by Compute Engine + to identify resources. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always compute#project + for projects. + + This field is a member of `oneof`_ ``_kind``. + name (str): + The project ID. For example: + my-example-project. Use the project ID to make + requests to Compute Engine. + + This field is a member of `oneof`_ ``_name``. + quotas (MutableSequence[google.cloud.compute_v1.types.Quota]): + [Output Only] Quotas assigned to this project. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + usage_export_location (google.cloud.compute_v1.types.UsageExportLocation): + The naming prefix for daily usage reports and + the Google Cloud Storage bucket where they are + stored. + + This field is a member of `oneof`_ ``_usage_export_location``. + vm_dns_setting (str): + [Output Only] Default internal DNS setting used by VMs + running in this project. Check the VmDnsSetting enum for the + list of possible values. + + This field is a member of `oneof`_ ``_vm_dns_setting``. + xpn_project_status (str): + [Output Only] The role this project has in a shared VPC + configuration. Currently, only projects with the host role, + which is specified by the value HOST, are differentiated. + Check the XpnProjectStatus enum for the list of possible + values. + + This field is a member of `oneof`_ ``_xpn_project_status``. + """ + class DefaultNetworkTier(proto.Enum): + r"""This signifies the default network tier used for configuring + resources of the project and can only take the following values: + PREMIUM, STANDARD. Initially the default network tier is + PREMIUM. + + Values: + UNDEFINED_DEFAULT_NETWORK_TIER (0): + A value indicating that the enum field is not + set. 
+ FIXED_STANDARD (310464328): + Public internet quality with fixed bandwidth. + PREMIUM (399530551): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (484642493): + Public internet quality, only limited support + for other networking products. + STANDARD_OVERRIDES_FIXED_STANDARD (465847234): + (Output only) Temporary tier for FIXED_STANDARD when fixed + standard tier is expired or not configured. + """ + UNDEFINED_DEFAULT_NETWORK_TIER = 0 + FIXED_STANDARD = 310464328 + PREMIUM = 399530551 + STANDARD = 484642493 + STANDARD_OVERRIDES_FIXED_STANDARD = 465847234 + + class VmDnsSetting(proto.Enum): + r"""[Output Only] Default internal DNS setting used by VMs running in + this project. + + Values: + UNDEFINED_VM_DNS_SETTING (0): + A value indicating that the enum field is not + set. + GLOBAL_DEFAULT (345419141): + No description available. + UNSPECIFIED_VM_DNS_SETTING (35691930): + No description available. + ZONAL_DEFAULT (368475782): + No description available. + ZONAL_ONLY (521198951): + No description available. + """ + UNDEFINED_VM_DNS_SETTING = 0 + GLOBAL_DEFAULT = 345419141 + UNSPECIFIED_VM_DNS_SETTING = 35691930 + ZONAL_DEFAULT = 368475782 + ZONAL_ONLY = 521198951 + + class XpnProjectStatus(proto.Enum): + r"""[Output Only] The role this project has in a shared VPC + configuration. Currently, only projects with the host role, which is + specified by the value HOST, are differentiated. + + Values: + UNDEFINED_XPN_PROJECT_STATUS (0): + A value indicating that the enum field is not + set. + HOST (2223528): + No description available. + UNSPECIFIED_XPN_PROJECT_STATUS (340393257): + No description available. 
+ """ + UNDEFINED_XPN_PROJECT_STATUS = 0 + HOST = 2223528 + UNSPECIFIED_XPN_PROJECT_STATUS = 340393257 + + common_instance_metadata: 'Metadata' = proto.Field( + proto.MESSAGE, + number=185794117, + optional=True, + message='Metadata', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + default_network_tier: str = proto.Field( + proto.STRING, + number=471753361, + optional=True, + ) + default_service_account: str = proto.Field( + proto.STRING, + number=298712229, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + enabled_features: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=469017467, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + quotas: MutableSequence['Quota'] = proto.RepeatedField( + proto.MESSAGE, + number=125341947, + message='Quota', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + usage_export_location: 'UsageExportLocation' = proto.Field( + proto.MESSAGE, + number=347543874, + optional=True, + message='UsageExportLocation', + ) + vm_dns_setting: str = proto.Field( + proto.STRING, + number=58856370, + optional=True, + ) + xpn_project_status: str = proto.Field( + proto.STRING, + number=228419265, + optional=True, + ) + + +class ProjectsDisableXpnResourceRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + xpn_resource (google.cloud.compute_v1.types.XpnResourceId): + Service resource (a.k.a service project) ID. + + This field is a member of `oneof`_ ``_xpn_resource``. 
+ """ + + xpn_resource: 'XpnResourceId' = proto.Field( + proto.MESSAGE, + number=133384631, + optional=True, + message='XpnResourceId', + ) + + +class ProjectsEnableXpnResourceRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + xpn_resource (google.cloud.compute_v1.types.XpnResourceId): + Service resource (a.k.a service project) ID. + + This field is a member of `oneof`_ ``_xpn_resource``. + """ + + xpn_resource: 'XpnResourceId' = proto.Field( + proto.MESSAGE, + number=133384631, + optional=True, + message='XpnResourceId', + ) + + +class ProjectsGetXpnResources(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + [Output Only] Type of resource. Always + compute#projectsGetXpnResources for lists of service + resources (a.k.a service projects) + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + resources (MutableSequence[google.cloud.compute_v1.types.XpnResourceId]): + Service resources (a.k.a service projects) + attached to this project as their shared VPC + host. 
+ """ + + @property + def raw_page(self): + return self + + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + resources: MutableSequence['XpnResourceId'] = proto.RepeatedField( + proto.MESSAGE, + number=164412965, + message='XpnResourceId', + ) + + +class ProjectsListXpnHostsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + organization (str): + Optional organization ID managed by Cloud + Resource Manager, for which to list shared VPC + host projects. If not specified, the + organization will be inferred from the project. + + This field is a member of `oneof`_ ``_organization``. + """ + + organization: str = proto.Field( + proto.STRING, + number=105180467, + optional=True, + ) + + +class ProjectsSetDefaultNetworkTierRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_tier (str): + Default network tier to be set. + Check the NetworkTier enum for the list of + possible values. + + This field is a member of `oneof`_ ``_network_tier``. + """ + class NetworkTier(proto.Enum): + r"""Default network tier to be set. + + Values: + UNDEFINED_NETWORK_TIER (0): + A value indicating that the enum field is not + set. + FIXED_STANDARD (310464328): + Public internet quality with fixed bandwidth. + PREMIUM (399530551): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (484642493): + Public internet quality, only limited support + for other networking products. + STANDARD_OVERRIDES_FIXED_STANDARD (465847234): + (Output only) Temporary tier for FIXED_STANDARD when fixed + standard tier is expired or not configured. 
+ """ + UNDEFINED_NETWORK_TIER = 0 + FIXED_STANDARD = 310464328 + PREMIUM = 399530551 + STANDARD = 484642493 + STANDARD_OVERRIDES_FIXED_STANDARD = 465847234 + + network_tier: str = proto.Field( + proto.STRING, + number=517397843, + optional=True, + ) + + +class PublicAdvertisedPrefix(proto.Message): + r"""A public advertised prefix represents an aggregated IP prefix + or netblock which customers bring to cloud. The IP prefix is a + single unit of route advertisement and is announced globally to + the internet. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + dns_verification_ip (str): + The address to be used for reverse DNS + verification. + + This field is a member of `oneof`_ ``_dns_verification_ip``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a new + PublicAdvertisedPrefix. An up-to-date + fingerprint must be provided in order to update + the PublicAdvertisedPrefix, otherwise the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve a + PublicAdvertisedPrefix. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource type. + The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. + ip_cidr_range (str): + The address range, in CIDR format, + represented by this public advertised prefix. + + This field is a member of `oneof`_ ``_ip_cidr_range``. 
+ kind (str): + [Output Only] Type of the resource. Always + compute#publicAdvertisedPrefix for public advertised + prefixes. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + public_delegated_prefixs (MutableSequence[google.cloud.compute_v1.types.PublicAdvertisedPrefixPublicDelegatedPrefix]): + [Output Only] The list of public delegated prefixes that + exist for this public advertised prefix. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + shared_secret (str): + [Output Only] The shared secret to be used for reverse DNS + verification. + + This field is a member of `oneof`_ ``_shared_secret``. + status (str): + The status of the public advertised prefix. Possible values + include: - ``INITIAL``: RPKI validation is complete. - + ``PTR_CONFIGURED``: User has configured the PTR. - + ``VALIDATED``: Reverse DNS lookup is successful. - + ``REVERSE_DNS_LOOKUP_FAILED``: Reverse DNS lookup failed. - + ``PREFIX_CONFIGURATION_IN_PROGRESS``: The prefix is being + configured. - ``PREFIX_CONFIGURATION_COMPLETE``: The prefix + is fully configured. - ``PREFIX_REMOVAL_IN_PROGRESS``: The + prefix is being removed. Check the Status enum for the list + of possible values. + + This field is a member of `oneof`_ ``_status``. + """ + class Status(proto.Enum): + r"""The status of the public advertised prefix. Possible values include: + - ``INITIAL``: RPKI validation is complete. 
- ``PTR_CONFIGURED``: + User has configured the PTR. - ``VALIDATED``: Reverse DNS lookup is + successful. - ``REVERSE_DNS_LOOKUP_FAILED``: Reverse DNS lookup + failed. - ``PREFIX_CONFIGURATION_IN_PROGRESS``: The prefix is being + configured. - ``PREFIX_CONFIGURATION_COMPLETE``: The prefix is fully + configured. - ``PREFIX_REMOVAL_IN_PROGRESS``: The prefix is being + removed. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + INITIAL (518841124): + RPKI validation is complete. + PREFIX_CONFIGURATION_COMPLETE (480889551): + The prefix is fully configured. + PREFIX_CONFIGURATION_IN_PROGRESS (378550961): + The prefix is being configured. + PREFIX_REMOVAL_IN_PROGRESS (284375783): + The prefix is being removed. + PTR_CONFIGURED (513497167): + User has configured the PTR. + REVERSE_DNS_LOOKUP_FAILED (295755183): + Reverse DNS lookup failed. + VALIDATED (66197998): + Reverse DNS lookup is successful. + """ + UNDEFINED_STATUS = 0 + INITIAL = 518841124 + PREFIX_CONFIGURATION_COMPLETE = 480889551 + PREFIX_CONFIGURATION_IN_PROGRESS = 378550961 + PREFIX_REMOVAL_IN_PROGRESS = 284375783 + PTR_CONFIGURED = 513497167 + REVERSE_DNS_LOOKUP_FAILED = 295755183 + VALIDATED = 66197998 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + dns_verification_ip: str = proto.Field( + proto.STRING, + number=241011381, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + public_delegated_prefixs: 
MutableSequence['PublicAdvertisedPrefixPublicDelegatedPrefix'] = proto.RepeatedField( + proto.MESSAGE, + number=425811723, + message='PublicAdvertisedPrefixPublicDelegatedPrefix', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + shared_secret: str = proto.Field( + proto.STRING, + number=381932490, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class PublicAdvertisedPrefixList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.PublicAdvertisedPrefix]): + A list of PublicAdvertisedPrefix resources. + kind (str): + [Output Only] Type of the resource. Always + compute#publicAdvertisedPrefix for public advertised + prefixes. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['PublicAdvertisedPrefix'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='PublicAdvertisedPrefix', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class PublicAdvertisedPrefixPublicDelegatedPrefix(proto.Message): + r"""Represents a CIDR range which can be used to assign + addresses. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_range (str): + The IP address range of the public delegated + prefix + + This field is a member of `oneof`_ ``_ip_range``. + name (str): + The name of the public delegated prefix + + This field is a member of `oneof`_ ``_name``. + project (str): + The project number of the public delegated + prefix + + This field is a member of `oneof`_ ``_project``. + region (str): + The region of the public delegated prefix if + it is regional. If absent, the prefix is global. + + This field is a member of `oneof`_ ``_region``. + status (str): + The status of the public delegated prefix. + Possible values are: INITIALIZING: The public + delegated prefix is being initialized and + addresses cannot be created yet. ANNOUNCED: The + public delegated prefix is active. + + This field is a member of `oneof`_ ``_status``. 
+ """ + + ip_range: str = proto.Field( + proto.STRING, + number=145092645, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class PublicDelegatedPrefix(proto.Message): + r"""A PublicDelegatedPrefix resource represents an IP block + within a PublicAdvertisedPrefix that is configured within a + single cloud scope (global or region). IPs in the block can be + allocated to resources within that scope. Public delegated + prefixes may be further broken up into smaller IP blocks in the + same scope as the parent block. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a new + PublicDelegatedPrefix. An up-to-date fingerprint + must be provided in order to update the + PublicDelegatedPrefix, otherwise the request + will fail with error 412 conditionNotMet. To see + the latest fingerprint, make a get() request to + retrieve a PublicDelegatedPrefix. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource type. + The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. 
+ ip_cidr_range (str): + The IP address range, in CIDR format, + represented by this public delegated prefix. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + is_live_migration (bool): + If true, the prefix will be live migrated. + + This field is a member of `oneof`_ ``_is_live_migration``. + kind (str): + [Output Only] Type of the resource. Always + compute#publicDelegatedPrefix for public delegated prefixes. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + parent_prefix (str): + The URL of parent prefix. Either + PublicAdvertisedPrefix or PublicDelegatedPrefix. + + This field is a member of `oneof`_ ``_parent_prefix``. + public_delegated_sub_prefixs (MutableSequence[google.cloud.compute_v1.types.PublicDelegatedPrefixPublicDelegatedSubPrefix]): + The list of sub public delegated prefixes + that exist for this public delegated prefix. + region (str): + [Output Only] URL of the region where the public delegated + prefix resides. This field applies only to the region + resource. You must specify this field as part of the HTTP + request URL. It is not settable as a field in the request + body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ status (str): + [Output Only] The status of the public delegated prefix, + which can be one of following values: - ``INITIALIZING`` The + public delegated prefix is being initialized and addresses + cannot be created yet. - ``READY_TO_ANNOUNCE`` The public + delegated prefix is a live migration prefix and is active. - + ``ANNOUNCED`` The public delegated prefix is active. - + ``DELETING`` The public delegated prefix is being + deprovsioned. Check the Status enum for the list of possible + values. + + This field is a member of `oneof`_ ``_status``. + """ + class Status(proto.Enum): + r"""[Output Only] The status of the public delegated prefix, which can + be one of following values: - ``INITIALIZING`` The public delegated + prefix is being initialized and addresses cannot be created yet. - + ``READY_TO_ANNOUNCE`` The public delegated prefix is a live + migration prefix and is active. - ``ANNOUNCED`` The public delegated + prefix is active. - ``DELETING`` The public delegated prefix is + being deprovsioned. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + ANNOUNCED (365103355): + The public delegated prefix is active. + DELETING (528602024): + The public delegated prefix is being + deprovsioned. + INITIALIZING (306588749): + The public delegated prefix is being + initialized and addresses cannot be created yet. + READY_TO_ANNOUNCE (64641265): + The public delegated prefix is currently + withdrawn but ready to be announced. 
+ """ + UNDEFINED_STATUS = 0 + ANNOUNCED = 365103355 + DELETING = 528602024 + INITIALIZING = 306588749 + READY_TO_ANNOUNCE = 64641265 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + is_live_migration: bool = proto.Field( + proto.BOOL, + number=511823856, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + parent_prefix: str = proto.Field( + proto.STRING, + number=15233991, + optional=True, + ) + public_delegated_sub_prefixs: MutableSequence['PublicDelegatedPrefixPublicDelegatedSubPrefix'] = proto.RepeatedField( + proto.MESSAGE, + number=188940044, + message='PublicDelegatedPrefixPublicDelegatedSubPrefix', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class PublicDelegatedPrefixAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.PublicDelegatedPrefixesScopedList]): + A list of PublicDelegatedPrefixesScopedList + resources. + kind (str): + [Output Only] Type of the resource. 
Always + compute#publicDelegatedPrefixAggregatedList for aggregated + lists of public delegated prefixes. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'PublicDelegatedPrefixesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='PublicDelegatedPrefixesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class PublicDelegatedPrefixList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.PublicDelegatedPrefix]): + A list of PublicDelegatedPrefix resources. + kind (str): + [Output Only] Type of the resource. Always + compute#publicDelegatedPrefixList for public delegated + prefixes. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['PublicDelegatedPrefix'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='PublicDelegatedPrefix', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class PublicDelegatedPrefixPublicDelegatedSubPrefix(proto.Message): + r"""Represents a sub PublicDelegatedPrefix. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + delegatee_project (str): + Name of the project scoping this + PublicDelegatedSubPrefix. + + This field is a member of `oneof`_ ``_delegatee_project``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + ip_cidr_range (str): + The IP address range, in CIDR format, + represented by this sub public delegated prefix. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + is_address (bool): + Whether the sub prefix is delegated to create + Address resources in the delegatee project. + + This field is a member of `oneof`_ ``_is_address``. + name (str): + The name of the sub public delegated prefix. + + This field is a member of `oneof`_ ``_name``. + region (str): + [Output Only] The region of the sub public delegated prefix + if it is regional. If absent, the sub prefix is global. + + This field is a member of `oneof`_ ``_region``. + status (str): + [Output Only] The status of the sub public delegated prefix. 
+ Check the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + """ + class Status(proto.Enum): + r"""[Output Only] The status of the sub public delegated prefix. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + No description available. + INACTIVE (270421099): + No description available. + """ + UNDEFINED_STATUS = 0 + ACTIVE = 314733318 + INACTIVE = 270421099 + + delegatee_project: str = proto.Field( + proto.STRING, + number=414860634, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + is_address: bool = proto.Field( + proto.BOOL, + number=352617951, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class PublicDelegatedPrefixesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + public_delegated_prefixes (MutableSequence[google.cloud.compute_v1.types.PublicDelegatedPrefix]): + [Output Only] A list of PublicDelegatedPrefixes contained in + this scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of public delegated prefixes when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + public_delegated_prefixes: MutableSequence['PublicDelegatedPrefix'] = proto.RepeatedField( + proto.MESSAGE, + number=315261206, + message='PublicDelegatedPrefix', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class Quota(proto.Message): + r"""A quotas entry. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + limit (float): + [Output Only] Quota limit for this metric. + + This field is a member of `oneof`_ ``_limit``. + metric (str): + [Output Only] Name of the quota metric. Check the Metric + enum for the list of possible values. + + This field is a member of `oneof`_ ``_metric``. + owner (str): + [Output Only] Owning resource. This is the resource on which + this quota is applied. + + This field is a member of `oneof`_ ``_owner``. + usage (float): + [Output Only] Current usage of this metric. + + This field is a member of `oneof`_ ``_usage``. + """ + class Metric(proto.Enum): + r"""[Output Only] Name of the quota metric. + + Values: + UNDEFINED_METRIC (0): + A value indicating that the enum field is not + set. + A2_CPUS (153206585): + No description available. + AFFINITY_GROUPS (108303563): + No description available. + AUTOSCALERS (471248988): + No description available. + BACKEND_BUCKETS (137626846): + No description available. + BACKEND_SERVICES (269623753): + No description available. + C2D_CPUS (508182517): + No description available. + C2_CPUS (317601211): + No description available. + C3_CPUS (346230362): + No description available. + COMMITMENTS (456141790): + No description available. + COMMITTED_A2_CPUS (59330902): + No description available. + COMMITTED_C2D_CPUS (282390904): + No description available. + COMMITTED_C2_CPUS (223725528): + No description available. + COMMITTED_C3_CPUS (252354679): + No description available. + COMMITTED_CPUS (292394702): + No description available. 
+ COMMITTED_E2_CPUS (388120154): + No description available. + COMMITTED_LICENSES (357606869): + No description available. + COMMITTED_LOCAL_SSD_TOTAL_GB (308393480): + No description available. + COMMITTED_M3_CPUS (585985): + No description available. + COMMITTED_MEMORY_OPTIMIZED_CPUS (489057886): + No description available. + COMMITTED_N2A_CPUS (40064304): + No description available. + COMMITTED_N2D_CPUS (125951757): + No description available. + COMMITTED_N2_CPUS (322589603): + No description available. + COMMITTED_NVIDIA_A100_80GB_GPUS (464326565): + No description available. + COMMITTED_NVIDIA_A100_GPUS (375799445): + No description available. + COMMITTED_NVIDIA_K80_GPUS (3857188): + No description available. + COMMITTED_NVIDIA_L4_GPUS (19163645): + No description available. + COMMITTED_NVIDIA_P100_GPUS (107528100): + No description available. + COMMITTED_NVIDIA_P4_GPUS (347952897): + No description available. + COMMITTED_NVIDIA_T4_GPUS (139871237): + No description available. + COMMITTED_NVIDIA_V100_GPUS (219562): + No description available. + COMMITTED_T2A_CPUS (296378986): + No description available. + COMMITTED_T2D_CPUS (382266439): + No description available. + CPUS (2075595): + Guest CPUs + CPUS_ALL_REGIONS (470911149): + No description available. + DISKS_TOTAL_GB (353520543): + No description available. + E2_CPUS (481995837): + No description available. + EXTERNAL_MANAGED_FORWARDING_RULES (150790089): + No description available. + EXTERNAL_NETWORK_LB_FORWARDING_RULES (374298265): + No description available. + EXTERNAL_PROTOCOL_FORWARDING_RULES (63478888): + No description available. + EXTERNAL_VPN_GATEWAYS (272457134): + No description available. + FIREWALLS (374485843): + No description available. + FORWARDING_RULES (432668949): + No description available. + GLOBAL_EXTERNAL_MANAGED_BACKEND_SERVICES (164566753): + No description available. + GLOBAL_EXTERNAL_MANAGED_FORWARDING_RULES (327611949): + No description available. 
+ GLOBAL_EXTERNAL_PROXY_LB_BACKEND_SERVICES (400256169): + No description available. + GLOBAL_INTERNAL_ADDRESSES (42738332): + No description available. + GLOBAL_INTERNAL_MANAGED_BACKEND_SERVICES (256608303): + No description available. + GLOBAL_INTERNAL_TRAFFIC_DIRECTOR_BACKEND_SERVICES (323514196): + No description available. + GPUS_ALL_REGIONS (39387177): + No description available. + HEALTH_CHECKS (289347502): + No description available. + IMAGES (15562360): + No description available. + INSTANCES (131337822): + No description available. + INSTANCE_GROUPS (355919038): + No description available. + INSTANCE_GROUP_MANAGERS (101798192): + No description available. + INSTANCE_TEMPLATES (226188271): + No description available. + INTERCONNECTS (415204741): + No description available. + INTERCONNECT_ATTACHMENTS_PER_REGION (159968086): + No description available. + INTERCONNECT_ATTACHMENTS_TOTAL_MBPS (425090419): + No description available. + INTERCONNECT_TOTAL_GBPS (285341866): + No description available. + INTERNAL_ADDRESSES (197899392): + No description available. + INTERNAL_TRAFFIC_DIRECTOR_FORWARDING_RULES (266433668): + No description available. + IN_PLACE_SNAPSHOTS (151359133): + No description available. + IN_USE_ADDRESSES (402125072): + No description available. + IN_USE_BACKUP_SCHEDULES (32786705): + No description available. + IN_USE_SNAPSHOT_SCHEDULES (462104083): + No description available. + LOCAL_SSD_TOTAL_GB (330878021): + No description available. + M1_CPUS (37203366): + No description available. + M2_CPUS (65832517): + No description available. + M3_CPUS (94461668): + No description available. + MACHINE_IMAGES (446986640): + No description available. + N2A_CPUS (265855917): + No description available. + N2D_CPUS (351743370): + No description available. + N2_CPUS (416465286): + No description available. + NETWORKS (485481477): + No description available. + NETWORK_ATTACHMENTS (149028575): + No description available. 
+ NETWORK_ENDPOINT_GROUPS (102144909): + No description available. + NETWORK_FIREWALL_POLICIES (101117374): + No description available. + NET_LB_SECURITY_POLICIES_PER_REGION (157892269): + No description available. + NET_LB_SECURITY_POLICY_RULES_PER_REGION (356090931): + No description available. + NET_LB_SECURITY_POLICY_RULE_ATTRIBUTES_PER_REGION (311243888): + No description available. + NODE_GROUPS (24624817): + No description available. + NODE_TEMPLATES (474896668): + No description available. + NVIDIA_A100_80GB_GPUS (286389320): + No description available. + NVIDIA_A100_GPUS (504872978): + No description available. + NVIDIA_K80_GPUS (163886599): + No description available. + NVIDIA_L4_GPUS (491923130): + No description available. + NVIDIA_P100_GPUS (236601633): + No description available. + NVIDIA_P100_VWS_GPUS (213970574): + No description available. + NVIDIA_P4_GPUS (283841470): + No description available. + NVIDIA_P4_VWS_GPUS (528296619): + No description available. + NVIDIA_T4_GPUS (75759810): + No description available. + NVIDIA_T4_VWS_GPUS (319813039): + No description available. + NVIDIA_V100_GPUS (129293095): + No description available. + PACKET_MIRRORINGS (15578407): + No description available. + PD_EXTREME_TOTAL_PROVISIONED_IOPS (69593965): + No description available. + PREEMPTIBLE_CPUS (251184841): + No description available. + PREEMPTIBLE_LOCAL_SSD_GB (260819336): + No description available. + PREEMPTIBLE_NVIDIA_A100_80GB_GPUS (151942410): + No description available. + PREEMPTIBLE_NVIDIA_A100_GPUS (68832784): + No description available. + PREEMPTIBLE_NVIDIA_K80_GPUS (374960201): + No description available. + PREEMPTIBLE_NVIDIA_L4_GPUS (100408376): + No description available. + PREEMPTIBLE_NVIDIA_P100_GPUS (337432351): + No description available. + PREEMPTIBLE_NVIDIA_P100_VWS_GPUS (313544076): + No description available. + PREEMPTIBLE_NVIDIA_P4_GPUS (429197628): + No description available. 
+ PREEMPTIBLE_NVIDIA_P4_VWS_GPUS (252981545): + No description available. + PREEMPTIBLE_NVIDIA_T4_GPUS (221115968): + No description available. + PREEMPTIBLE_NVIDIA_T4_VWS_GPUS (44497965): + No description available. + PREEMPTIBLE_NVIDIA_V100_GPUS (230123813): + No description available. + PSC_ILB_CONSUMER_FORWARDING_RULES_PER_PRODUCER_NETWORK (231164291): + No description available. + PSC_INTERNAL_LB_FORWARDING_RULES (169005435): + No description available. + PUBLIC_ADVERTISED_PREFIXES (471371980): + No description available. + PUBLIC_DELEGATED_PREFIXES (532465974): + No description available. + REGIONAL_AUTOSCALERS (29363772): + No description available. + REGIONAL_EXTERNAL_MANAGED_BACKEND_SERVICES (4240989): + No description available. + REGIONAL_EXTERNAL_NETWORK_LB_BACKEND_SERVICES (409564525): + No description available. + REGIONAL_INSTANCE_GROUP_MANAGERS (37543696): + No description available. + REGIONAL_INTERNAL_LB_BACKEND_SERVICES (137983760): + No description available. + REGIONAL_INTERNAL_MANAGED_BACKEND_SERVICES (96282539): + No description available. + RESERVATIONS (32644647): + No description available. + RESOURCE_POLICIES (83955297): + No description available. + ROUTERS (493018666): + No description available. + ROUTES (275680074): + No description available. + SECURITY_POLICIES (189518703): + No description available. + SECURITY_POLICIES_PER_REGION (249041734): + No description available. + SECURITY_POLICY_ADVANCED_RULES_PER_REGION (371815341): + No description available. + SECURITY_POLICY_CEVAL_RULES (470815689): + No description available. + SECURITY_POLICY_RULES (203549225): + No description available. + SECURITY_POLICY_RULES_PER_REGION (126510156): + No description available. + SERVICE_ATTACHMENTS (471521510): + No description available. + SNAPSHOTS (343405327): + The total number of snapshots allowed for a + single project. + SSD_TOTAL_GB (161732561): + No description available. + SSL_CERTIFICATES (378372399): + No description available. 
+ STATIC_ADDRESSES (93624049): + No description available. + STATIC_BYOIP_ADDRESSES (275809649): + No description available. + STATIC_EXTERNAL_IPV6_ADDRESS_RANGES (472346774): + No description available. + SUBNETWORKS (421330469): + No description available. + T2A_CPUS (522170599): + No description available. + T2D_CPUS (71187140): + No description available. + TARGET_HTTPS_PROXIES (219522506): + No description available. + TARGET_HTTP_PROXIES (164117155): + No description available. + TARGET_INSTANCES (284519728): + No description available. + TARGET_POOLS (348261257): + No description available. + TARGET_SSL_PROXIES (159216235): + No description available. + TARGET_TCP_PROXIES (182243136): + No description available. + TARGET_VPN_GATEWAYS (75029928): + No description available. + URL_MAPS (378660743): + No description available. + VPN_GATEWAYS (35620282): + No description available. + VPN_TUNNELS (104327296): + No description available. + XPN_SERVICE_PROJECTS (95191981): + No description available. 
+ """ + UNDEFINED_METRIC = 0 + A2_CPUS = 153206585 + AFFINITY_GROUPS = 108303563 + AUTOSCALERS = 471248988 + BACKEND_BUCKETS = 137626846 + BACKEND_SERVICES = 269623753 + C2D_CPUS = 508182517 + C2_CPUS = 317601211 + C3_CPUS = 346230362 + COMMITMENTS = 456141790 + COMMITTED_A2_CPUS = 59330902 + COMMITTED_C2D_CPUS = 282390904 + COMMITTED_C2_CPUS = 223725528 + COMMITTED_C3_CPUS = 252354679 + COMMITTED_CPUS = 292394702 + COMMITTED_E2_CPUS = 388120154 + COMMITTED_LICENSES = 357606869 + COMMITTED_LOCAL_SSD_TOTAL_GB = 308393480 + COMMITTED_M3_CPUS = 585985 + COMMITTED_MEMORY_OPTIMIZED_CPUS = 489057886 + COMMITTED_N2A_CPUS = 40064304 + COMMITTED_N2D_CPUS = 125951757 + COMMITTED_N2_CPUS = 322589603 + COMMITTED_NVIDIA_A100_80GB_GPUS = 464326565 + COMMITTED_NVIDIA_A100_GPUS = 375799445 + COMMITTED_NVIDIA_K80_GPUS = 3857188 + COMMITTED_NVIDIA_L4_GPUS = 19163645 + COMMITTED_NVIDIA_P100_GPUS = 107528100 + COMMITTED_NVIDIA_P4_GPUS = 347952897 + COMMITTED_NVIDIA_T4_GPUS = 139871237 + COMMITTED_NVIDIA_V100_GPUS = 219562 + COMMITTED_T2A_CPUS = 296378986 + COMMITTED_T2D_CPUS = 382266439 + CPUS = 2075595 + CPUS_ALL_REGIONS = 470911149 + DISKS_TOTAL_GB = 353520543 + E2_CPUS = 481995837 + EXTERNAL_MANAGED_FORWARDING_RULES = 150790089 + EXTERNAL_NETWORK_LB_FORWARDING_RULES = 374298265 + EXTERNAL_PROTOCOL_FORWARDING_RULES = 63478888 + EXTERNAL_VPN_GATEWAYS = 272457134 + FIREWALLS = 374485843 + FORWARDING_RULES = 432668949 + GLOBAL_EXTERNAL_MANAGED_BACKEND_SERVICES = 164566753 + GLOBAL_EXTERNAL_MANAGED_FORWARDING_RULES = 327611949 + GLOBAL_EXTERNAL_PROXY_LB_BACKEND_SERVICES = 400256169 + GLOBAL_INTERNAL_ADDRESSES = 42738332 + GLOBAL_INTERNAL_MANAGED_BACKEND_SERVICES = 256608303 + GLOBAL_INTERNAL_TRAFFIC_DIRECTOR_BACKEND_SERVICES = 323514196 + GPUS_ALL_REGIONS = 39387177 + HEALTH_CHECKS = 289347502 + IMAGES = 15562360 + INSTANCES = 131337822 + INSTANCE_GROUPS = 355919038 + INSTANCE_GROUP_MANAGERS = 101798192 + INSTANCE_TEMPLATES = 226188271 + INTERCONNECTS = 415204741 + 
INTERCONNECT_ATTACHMENTS_PER_REGION = 159968086 + INTERCONNECT_ATTACHMENTS_TOTAL_MBPS = 425090419 + INTERCONNECT_TOTAL_GBPS = 285341866 + INTERNAL_ADDRESSES = 197899392 + INTERNAL_TRAFFIC_DIRECTOR_FORWARDING_RULES = 266433668 + IN_PLACE_SNAPSHOTS = 151359133 + IN_USE_ADDRESSES = 402125072 + IN_USE_BACKUP_SCHEDULES = 32786705 + IN_USE_SNAPSHOT_SCHEDULES = 462104083 + LOCAL_SSD_TOTAL_GB = 330878021 + M1_CPUS = 37203366 + M2_CPUS = 65832517 + M3_CPUS = 94461668 + MACHINE_IMAGES = 446986640 + N2A_CPUS = 265855917 + N2D_CPUS = 351743370 + N2_CPUS = 416465286 + NETWORKS = 485481477 + NETWORK_ATTACHMENTS = 149028575 + NETWORK_ENDPOINT_GROUPS = 102144909 + NETWORK_FIREWALL_POLICIES = 101117374 + NET_LB_SECURITY_POLICIES_PER_REGION = 157892269 + NET_LB_SECURITY_POLICY_RULES_PER_REGION = 356090931 + NET_LB_SECURITY_POLICY_RULE_ATTRIBUTES_PER_REGION = 311243888 + NODE_GROUPS = 24624817 + NODE_TEMPLATES = 474896668 + NVIDIA_A100_80GB_GPUS = 286389320 + NVIDIA_A100_GPUS = 504872978 + NVIDIA_K80_GPUS = 163886599 + NVIDIA_L4_GPUS = 491923130 + NVIDIA_P100_GPUS = 236601633 + NVIDIA_P100_VWS_GPUS = 213970574 + NVIDIA_P4_GPUS = 283841470 + NVIDIA_P4_VWS_GPUS = 528296619 + NVIDIA_T4_GPUS = 75759810 + NVIDIA_T4_VWS_GPUS = 319813039 + NVIDIA_V100_GPUS = 129293095 + PACKET_MIRRORINGS = 15578407 + PD_EXTREME_TOTAL_PROVISIONED_IOPS = 69593965 + PREEMPTIBLE_CPUS = 251184841 + PREEMPTIBLE_LOCAL_SSD_GB = 260819336 + PREEMPTIBLE_NVIDIA_A100_80GB_GPUS = 151942410 + PREEMPTIBLE_NVIDIA_A100_GPUS = 68832784 + PREEMPTIBLE_NVIDIA_K80_GPUS = 374960201 + PREEMPTIBLE_NVIDIA_L4_GPUS = 100408376 + PREEMPTIBLE_NVIDIA_P100_GPUS = 337432351 + PREEMPTIBLE_NVIDIA_P100_VWS_GPUS = 313544076 + PREEMPTIBLE_NVIDIA_P4_GPUS = 429197628 + PREEMPTIBLE_NVIDIA_P4_VWS_GPUS = 252981545 + PREEMPTIBLE_NVIDIA_T4_GPUS = 221115968 + PREEMPTIBLE_NVIDIA_T4_VWS_GPUS = 44497965 + PREEMPTIBLE_NVIDIA_V100_GPUS = 230123813 + PSC_ILB_CONSUMER_FORWARDING_RULES_PER_PRODUCER_NETWORK = 231164291 + PSC_INTERNAL_LB_FORWARDING_RULES = 
169005435 + PUBLIC_ADVERTISED_PREFIXES = 471371980 + PUBLIC_DELEGATED_PREFIXES = 532465974 + REGIONAL_AUTOSCALERS = 29363772 + REGIONAL_EXTERNAL_MANAGED_BACKEND_SERVICES = 4240989 + REGIONAL_EXTERNAL_NETWORK_LB_BACKEND_SERVICES = 409564525 + REGIONAL_INSTANCE_GROUP_MANAGERS = 37543696 + REGIONAL_INTERNAL_LB_BACKEND_SERVICES = 137983760 + REGIONAL_INTERNAL_MANAGED_BACKEND_SERVICES = 96282539 + RESERVATIONS = 32644647 + RESOURCE_POLICIES = 83955297 + ROUTERS = 493018666 + ROUTES = 275680074 + SECURITY_POLICIES = 189518703 + SECURITY_POLICIES_PER_REGION = 249041734 + SECURITY_POLICY_ADVANCED_RULES_PER_REGION = 371815341 + SECURITY_POLICY_CEVAL_RULES = 470815689 + SECURITY_POLICY_RULES = 203549225 + SECURITY_POLICY_RULES_PER_REGION = 126510156 + SERVICE_ATTACHMENTS = 471521510 + SNAPSHOTS = 343405327 + SSD_TOTAL_GB = 161732561 + SSL_CERTIFICATES = 378372399 + STATIC_ADDRESSES = 93624049 + STATIC_BYOIP_ADDRESSES = 275809649 + STATIC_EXTERNAL_IPV6_ADDRESS_RANGES = 472346774 + SUBNETWORKS = 421330469 + T2A_CPUS = 522170599 + T2D_CPUS = 71187140 + TARGET_HTTPS_PROXIES = 219522506 + TARGET_HTTP_PROXIES = 164117155 + TARGET_INSTANCES = 284519728 + TARGET_POOLS = 348261257 + TARGET_SSL_PROXIES = 159216235 + TARGET_TCP_PROXIES = 182243136 + TARGET_VPN_GATEWAYS = 75029928 + URL_MAPS = 378660743 + VPN_GATEWAYS = 35620282 + VPN_TUNNELS = 104327296 + XPN_SERVICE_PROJECTS = 95191981 + + limit: float = proto.Field( + proto.DOUBLE, + number=102976443, + optional=True, + ) + metric: str = proto.Field( + proto.STRING, + number=533067184, + optional=True, + ) + owner: str = proto.Field( + proto.STRING, + number=106164915, + optional=True, + ) + usage: float = proto.Field( + proto.DOUBLE, + number=111574433, + optional=True, + ) + + +class QuotaExceededInfo(proto.Message): + r"""Additional details for quota exceeded error for resource + quota. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimensions (MutableMapping[str, str]): + The map holding related quota dimensions. + future_limit (float): + Future quota limit being rolled out. The + limit's unit depends on the quota type or + metric. + + This field is a member of `oneof`_ ``_future_limit``. + limit (float): + Current effective quota limit. The limit's + unit depends on the quota type or metric. + + This field is a member of `oneof`_ ``_limit``. + limit_name (str): + The name of the quota limit. + + This field is a member of `oneof`_ ``_limit_name``. + metric_name (str): + The Compute Engine quota metric name. + + This field is a member of `oneof`_ ``_metric_name``. + rollout_status (str): + Rollout status of the future quota limit. + Check the RolloutStatus enum for the list of + possible values. + + This field is a member of `oneof`_ ``_rollout_status``. + """ + class RolloutStatus(proto.Enum): + r"""Rollout status of the future quota limit. + + Values: + UNDEFINED_ROLLOUT_STATUS (0): + A value indicating that the enum field is not + set. + IN_PROGRESS (469193735): + IN_PROGRESS - A rollout is in process which will change the + limit value to future limit. + ROLLOUT_STATUS_UNSPECIFIED (26864568): + ROLLOUT_STATUS_UNSPECIFIED - Rollout status is not + specified. The default value. 
+ """ + UNDEFINED_ROLLOUT_STATUS = 0 + IN_PROGRESS = 469193735 + ROLLOUT_STATUS_UNSPECIFIED = 26864568 + + dimensions: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=414334925, + ) + future_limit: float = proto.Field( + proto.DOUBLE, + number=456564287, + optional=True, + ) + limit: float = proto.Field( + proto.DOUBLE, + number=102976443, + optional=True, + ) + limit_name: str = proto.Field( + proto.STRING, + number=398197903, + optional=True, + ) + metric_name: str = proto.Field( + proto.STRING, + number=409881530, + optional=True, + ) + rollout_status: str = proto.Field( + proto.STRING, + number=476426816, + optional=True, + ) + + +class RawDisk(proto.Message): + r"""The parameters of the raw disk image. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + container_type (str): + The format used to encode and transmit the + block device, which should be TAR. This is just + a container and transmission format and not a + runtime format. Provided by the client when the + disk image is created. Check the ContainerType + enum for the list of possible values. + + This field is a member of `oneof`_ ``_container_type``. + sha1_checksum (str): + [Deprecated] This field is deprecated. An optional SHA1 + checksum of the disk image before unpackaging provided by + the client when the disk image is created. + + This field is a member of `oneof`_ ``_sha1_checksum``. + source (str): + The full Google Cloud Storage URL where the raw disk image + archive is stored. 
The following are valid formats for the + URL: - + https://storage.googleapis.com/bucket_name/image_archive_name + - https://storage.googleapis.com/bucket_name/folder_name/ + image_archive_name In order to create an image, you must + provide the full or partial URL of one of the following: - + The rawDisk.source URL - The sourceDisk URL - The + sourceImage URL - The sourceSnapshot URL + + This field is a member of `oneof`_ ``_source``. + """ + class ContainerType(proto.Enum): + r"""The format used to encode and transmit the block device, + which should be TAR. This is just a container and transmission + format and not a runtime format. Provided by the client when the + disk image is created. + + Values: + UNDEFINED_CONTAINER_TYPE (0): + A value indicating that the enum field is not + set. + TAR (82821): + No description available. + """ + UNDEFINED_CONTAINER_TYPE = 0 + TAR = 82821 + + container_type: str = proto.Field( + proto.STRING, + number=318809144, + optional=True, + ) + sha1_checksum: str = proto.Field( + proto.STRING, + number=314444349, + optional=True, + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + + +class RecreateInstancesInstanceGroupManagerRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.RecreateInstances. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + instance_group_managers_recreate_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersRecreateInstancesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_recreate_instances_request_resource: 'InstanceGroupManagersRecreateInstancesRequest' = proto.Field( + proto.MESSAGE, + number=21405952, + message='InstanceGroupManagersRecreateInstancesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class RecreateInstancesRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.RecreateInstances. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + Name of the managed instance group. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ region_instance_group_managers_recreate_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersRecreateRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_managers_recreate_request_resource: 'RegionInstanceGroupManagersRecreateRequest' = proto.Field( + proto.MESSAGE, + number=170999316, + message='RegionInstanceGroupManagersRecreateRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class Reference(proto.Message): + r"""Represents a reference to a resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + [Output Only] Type of the resource. Always compute#reference + for references. + + This field is a member of `oneof`_ ``_kind``. + reference_type (str): + A description of the reference type with no implied + semantics. Possible values include: 1. 
MEMBER_OF + + This field is a member of `oneof`_ ``_reference_type``. + referrer (str): + URL of the resource which refers to the + target. + + This field is a member of `oneof`_ ``_referrer``. + target (str): + URL of the resource to which this reference + points. + + This field is a member of `oneof`_ ``_target``. + """ + + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + reference_type: str = proto.Field( + proto.STRING, + number=247521198, + optional=True, + ) + referrer: str = proto.Field( + proto.STRING, + number=351173663, + optional=True, + ) + target: str = proto.Field( + proto.STRING, + number=192835985, + optional=True, + ) + + +class Region(proto.Message): + r"""Represents a Region resource. A region is a geographical area + where a resource is located. For more information, read Regions + and Zones. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + deprecated (google.cloud.compute_v1.types.DeprecationStatus): + [Output Only] The deprecation status associated with this + region. + + This field is a member of `oneof`_ ``_deprecated``. + description (str): + [Output Only] Textual description of the resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always compute#region + for regions. + + This field is a member of `oneof`_ ``_kind``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + quotas (MutableSequence[google.cloud.compute_v1.types.Quota]): + [Output Only] Quotas assigned to this region. 
+ self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + status (str): + [Output Only] Status of the region, either UP or DOWN. Check + the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + supports_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_supports_pzs``. + zones (MutableSequence[str]): + [Output Only] A list of zones available in this region, in + the form of resource URLs. + """ + class Status(proto.Enum): + r"""[Output Only] Status of the region, either UP or DOWN. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + DOWN (2104482): + No description available. + UP (2715): + No description available. + """ + UNDEFINED_STATUS = 0 + DOWN = 2104482 + UP = 2715 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + deprecated: 'DeprecationStatus' = proto.Field( + proto.MESSAGE, + number=515138995, + optional=True, + message='DeprecationStatus', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + quotas: MutableSequence['Quota'] = proto.RepeatedField( + proto.MESSAGE, + number=125341947, + message='Quota', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + supports_pzs: bool = proto.Field( + proto.BOOL, + number=83983214, + optional=True, + ) + zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=116085319, + ) + + +class RegionAddressesMoveRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + An optional destination address description + if intended to be different from the source. + + This field is a member of `oneof`_ ``_description``. + destination_address (str): + The URL of the destination address to move + to. This can be a full or partial URL. For + example, the following are all valid URLs to a + address: - + https://www.googleapis.com/compute/v1/projects/project/regions/region + /addresses/address - + projects/project/regions/region/addresses/address + Note that destination project must be different + from the source project. So + /regions/region/addresses/address is not valid + partial url. + + This field is a member of `oneof`_ ``_destination_address``. + """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + destination_address: str = proto.Field( + proto.STRING, + number=371693763, + optional=True, + ) + + +class RegionAutoscalerList(proto.Message): + r"""Contains a list of autoscalers. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Autoscaler]): + A list of Autoscaler resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Autoscaler'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Autoscaler', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RegionDiskTypeList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.DiskType]): + A list of DiskType resources. + kind (str): + [Output Only] Type of resource. Always + compute#regionDiskTypeList for region disk types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['DiskType'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='DiskType', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RegionDisksAddResourcePoliciesRequest(proto.Message): + r""" + + Attributes: + resource_policies (MutableSequence[str]): + Resource policies to be added to this disk. + """ + + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + + +class RegionDisksRemoveResourcePoliciesRequest(proto.Message): + r""" + + Attributes: + resource_policies (MutableSequence[str]): + Resource policies to be removed from this + disk. + """ + + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + + +class RegionDisksResizeRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + size_gb (int): + The new size of the regional persistent disk, + which is specified in GB. + + This field is a member of `oneof`_ ``_size_gb``. + """ + + size_gb: int = proto.Field( + proto.INT64, + number=494929369, + optional=True, + ) + + +class RegionDisksStartAsyncReplicationRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + async_secondary_disk (str): + The secondary disk to start asynchronous + replication to. You can provide this as a + partial or full URL to the resource. For + example, the following are valid values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /disks/disk - + https://www.googleapis.com/compute/v1/projects/project/regions/region + /disks/disk - + projects/project/zones/zone/disks/disk - + projects/project/regions/region/disks/disk - + zones/zone/disks/disk - + regions/region/disks/disk + + This field is a member of `oneof`_ ``_async_secondary_disk``. + """ + + async_secondary_disk: str = proto.Field( + proto.STRING, + number=131645867, + optional=True, + ) + + +class RegionInstanceGroupList(proto.Message): + r"""Contains a list of InstanceGroup resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceGroup]): + A list of InstanceGroup resources. + kind (str): + The resource type. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InstanceGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceGroup', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RegionInstanceGroupManagerDeleteInstanceConfigReq(proto.Message): + r"""RegionInstanceGroupManagers.deletePerInstanceConfigs + + Attributes: + names (MutableSequence[str]): + The list of instance names for which we want + to delete per-instance configs on this managed + instance group. + """ + + names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=104585032, + ) + + +class RegionInstanceGroupManagerList(proto.Message): + r"""Contains a list of managed instance groups. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceGroupManager]): + A list of InstanceGroupManager resources. + kind (str): + [Output Only] The resource type, which is always + compute#instanceGroupManagerList for a list of managed + instance groups that exist in th regional scope. + + This field is a member of `oneof`_ ``_kind``. 
+ next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InstanceGroupManager'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceGroupManager', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RegionInstanceGroupManagerPatchInstanceConfigReq(proto.Message): + r"""RegionInstanceGroupManagers.patchPerInstanceConfigs + + Attributes: + per_instance_configs (MutableSequence[google.cloud.compute_v1.types.PerInstanceConfig]): + The list of per-instance configurations to + insert or patch on this managed instance group. 
+ """ + + per_instance_configs: MutableSequence['PerInstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=526265001, + message='PerInstanceConfig', + ) + + +class RegionInstanceGroupManagerUpdateInstanceConfigReq(proto.Message): + r"""RegionInstanceGroupManagers.updatePerInstanceConfigs + + Attributes: + per_instance_configs (MutableSequence[google.cloud.compute_v1.types.PerInstanceConfig]): + The list of per-instance configurations to + insert or patch on this managed instance group. + """ + + per_instance_configs: MutableSequence['PerInstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=526265001, + message='PerInstanceConfig', + ) + + +class RegionInstanceGroupManagersAbandonInstancesRequest(proto.Message): + r""" + + Attributes: + instances (MutableSequence[str]): + The URLs of one or more instances to abandon. This can be a + full URL or a partial URL, such as + zones/[ZONE]/instances/[INSTANCE_NAME]. + """ + + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + + +class RegionInstanceGroupManagersApplyUpdatesRequest(proto.Message): + r"""RegionInstanceGroupManagers.applyUpdatesToInstances + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + all_instances (bool): + Flag to update all instances instead of + specified list of “instances”. If the flag is + set to true then the instances may not be + specified in the request. + + This field is a member of `oneof`_ ``_all_instances``. + instances (MutableSequence[str]): + The list of URLs of one or more instances for which you want + to apply updates. Each URL can be a full URL or a partial + URL, such as zones/[ZONE]/instances/[INSTANCE_NAME]. + minimal_action (str): + The minimal action that you want to perform + on each instance during the update: - REPLACE: + At minimum, delete the instance and create it + again. - RESTART: Stop the instance and start it + again. 
- REFRESH: Do not stop the instance and + limit disruption as much as possible. - NONE: Do + not disrupt the instance at all. By default, the + minimum action is NONE. If your update requires + a more disruptive action than you set with this + flag, the necessary action is performed to + execute the update. Check the MinimalAction enum + for the list of possible values. + + This field is a member of `oneof`_ ``_minimal_action``. + most_disruptive_allowed_action (str): + The most disruptive action that you want to + perform on each instance during the update: - + REPLACE: Delete the instance and create it + again. - RESTART: Stop the instance and start it + again. - REFRESH: Do not stop the instance and + limit disruption as much as possible. - NONE: Do + not disrupt the instance at all. By default, the + most disruptive allowed action is REPLACE. If + your update requires a more disruptive action + than you set with this flag, the update request + will fail. Check the MostDisruptiveAllowedAction + enum for the list of possible values. + + This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. + """ + class MinimalAction(proto.Enum): + r"""The minimal action that you want to perform on each instance + during the update: - REPLACE: At minimum, delete the instance + and create it again. - RESTART: Stop the instance and start it + again. - REFRESH: Do not stop the instance and limit disruption + as much as possible. - NONE: Do not disrupt the instance at all. + By default, the minimum action is NONE. If your update requires + a more disruptive action than you set with this flag, the + necessary action is performed to execute the update. Additional + supported values which may be not listed in the enum directly + due to technical reasons: + + NONE + REFRESH + REPLACE + RESTART + + Values: + UNDEFINED_MINIMAL_ACTION (0): + A value indicating that the enum field is not + set. 
+ """ + UNDEFINED_MINIMAL_ACTION = 0 + + class MostDisruptiveAllowedAction(proto.Enum): + r"""The most disruptive action that you want to perform on each + instance during the update: - REPLACE: Delete the instance and + create it again. - RESTART: Stop the instance and start it + again. - REFRESH: Do not stop the instance and limit disruption + as much as possible. - NONE: Do not disrupt the instance at all. + By default, the most disruptive allowed action is REPLACE. If + your update requires a more disruptive action than you set with + this flag, the update request will fail. Additional supported + values which may be not listed in the enum directly due to + technical reasons: + + NONE + REFRESH + REPLACE + RESTART + + Values: + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION = 0 + + all_instances: bool = proto.Field( + proto.BOOL, + number=403676512, + optional=True, + ) + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + minimal_action: str = proto.Field( + proto.STRING, + number=270567060, + optional=True, + ) + most_disruptive_allowed_action: str = proto.Field( + proto.STRING, + number=66103053, + optional=True, + ) + + +class RegionInstanceGroupManagersCreateInstancesRequest(proto.Message): + r"""RegionInstanceGroupManagers.createInstances + + Attributes: + instances (MutableSequence[google.cloud.compute_v1.types.PerInstanceConfig]): + [Required] List of specifications of per-instance configs. + """ + + instances: MutableSequence['PerInstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=29097598, + message='PerInstanceConfig', + ) + + +class RegionInstanceGroupManagersDeleteInstancesRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instances (MutableSequence[str]): + The URLs of one or more instances to delete. This can be a + full URL or a partial URL, such as + zones/[ZONE]/instances/[INSTANCE_NAME]. + skip_instances_on_validation_error (bool): + Specifies whether the request should proceed despite the + inclusion of instances that are not members of the group or + that are already in the process of being deleted or + abandoned. If this field is set to ``false`` and such an + instance is specified in the request, the operation fails. + The operation always fails if the request contains a + malformed instance URL or a reference to an instance that + exists in a zone or region other than the group's zone or + region. + + This field is a member of `oneof`_ ``_skip_instances_on_validation_error``. + """ + + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + skip_instances_on_validation_error: bool = proto.Field( + proto.BOOL, + number=40631073, + optional=True, + ) + + +class RegionInstanceGroupManagersListErrorsResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + items (MutableSequence[google.cloud.compute_v1.types.InstanceManagedByIgmError]): + [Output Only] The list of errors of the managed instance + group. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ """ + + @property + def raw_page(self): + return self + + items: MutableSequence['InstanceManagedByIgmError'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceManagedByIgmError', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + + +class RegionInstanceGroupManagersListInstanceConfigsResp(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + items (MutableSequence[google.cloud.compute_v1.types.PerInstanceConfig]): + [Output Only] The list of PerInstanceConfig. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + items: MutableSequence['PerInstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='PerInstanceConfig', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RegionInstanceGroupManagersListInstancesResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + managed_instances (MutableSequence[google.cloud.compute_v1.types.ManagedInstance]): + A list of managed instances. 
+ next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + """ + + @property + def raw_page(self): + return self + + managed_instances: MutableSequence['ManagedInstance'] = proto.RepeatedField( + proto.MESSAGE, + number=336219614, + message='ManagedInstance', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + + +class RegionInstanceGroupManagersRecreateRequest(proto.Message): + r""" + + Attributes: + instances (MutableSequence[str]): + The URLs of one or more instances to recreate. This can be a + full URL or a partial URL, such as + zones/[ZONE]/instances/[INSTANCE_NAME]. + """ + + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + + +class RegionInstanceGroupManagersSetTargetPoolsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fingerprint (str): + Fingerprint of the target pools information, + which is a hash of the contents. This field is + used for optimistic locking when you update the + target pool entries. This field is optional. + + This field is a member of `oneof`_ ``_fingerprint``. + target_pools (MutableSequence[str]): + The URL of all TargetPool resources to which + instances in the instanceGroup field are added. + The target pools automatically apply to all of + the instances in the managed instance group. 
+ """ + + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + target_pools: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=336072617, + ) + + +class RegionInstanceGroupManagersSetTemplateRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_template (str): + URL of the InstanceTemplate resource from + which all new instances will be created. + + This field is a member of `oneof`_ ``_instance_template``. + """ + + instance_template: str = proto.Field( + proto.STRING, + number=309248228, + optional=True, + ) + + +class RegionInstanceGroupsListInstances(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceWithNamedPorts]): + A list of InstanceWithNamedPorts resources. + kind (str): + The resource type. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['InstanceWithNamedPorts'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='InstanceWithNamedPorts', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RegionInstanceGroupsListInstancesRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_state (str): + Instances in which state should be returned. + Valid options are: 'ALL', 'RUNNING'. By default, + it lists all instances. Check the InstanceState + enum for the list of possible values. + + This field is a member of `oneof`_ ``_instance_state``. + port_name (str): + Name of port user is interested in. It is + optional. If it is set, only information about + this ports will be returned. If it is not set, + all the named ports will be returned. Always + lists all instances. + + This field is a member of `oneof`_ ``_port_name``. + """ + class InstanceState(proto.Enum): + r"""Instances in which state should be returned. Valid options + are: 'ALL', 'RUNNING'. By default, it lists all instances. + + Values: + UNDEFINED_INSTANCE_STATE (0): + A value indicating that the enum field is not + set. + ALL (64897): + Matches any status of the instances, running, + non-running and others. + RUNNING (121282975): + Instance is in RUNNING state if it is + running. 
+ """ + UNDEFINED_INSTANCE_STATE = 0 + ALL = 64897 + RUNNING = 121282975 + + instance_state: str = proto.Field( + proto.STRING, + number=92223591, + optional=True, + ) + port_name: str = proto.Field( + proto.STRING, + number=41534345, + optional=True, + ) + + +class RegionInstanceGroupsSetNamedPortsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fingerprint (str): + The fingerprint of the named ports + information for this instance group. Use this + optional property to prevent conflicts when + multiple users change the named ports settings + concurrently. Obtain the fingerprint with the + instanceGroups.get method. Then, include the + fingerprint in your request to ensure that you + do not overwrite changes that were applied from + another concurrent request. + + This field is a member of `oneof`_ ``_fingerprint``. + named_ports (MutableSequence[google.cloud.compute_v1.types.NamedPort]): + The list of named ports to set for this + instance group. + """ + + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + named_ports: MutableSequence['NamedPort'] = proto.RepeatedField( + proto.MESSAGE, + number=427598732, + message='NamedPort', + ) + + +class RegionList(proto.Message): + r"""Contains a list of region resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Region]): + A list of Region resources. + kind (str): + [Output Only] Type of resource. Always compute#regionList + for lists of regions. + + This field is a member of `oneof`_ ``_kind``. 
+ next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Region'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Region', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse(proto.Message): + r""" + + Attributes: + firewall_policys (MutableSequence[google.cloud.compute_v1.types.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy]): + Effective firewalls from firewall policy. + firewalls (MutableSequence[google.cloud.compute_v1.types.Firewall]): + Effective firewalls on the network. 
+ """ + + firewall_policys: MutableSequence['RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=410985794, + message='RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy', + ) + firewalls: MutableSequence['Firewall'] = proto.RepeatedField( + proto.MESSAGE, + number=272245619, + message='Firewall', + ) + + +class RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + [Output Only] The display name of the firewall policy. + + This field is a member of `oneof`_ ``_display_name``. + name (str): + [Output Only] The name of the firewall policy. + + This field is a member of `oneof`_ ``_name``. + rules (MutableSequence[google.cloud.compute_v1.types.FirewallPolicyRule]): + The rules that apply to the network. + type_ (str): + [Output Only] The type of the firewall policy. Can be one of + HIERARCHY, NETWORK, NETWORK_REGIONAL. Check the Type enum + for the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""[Output Only] The type of the firewall policy. Can be one of + HIERARCHY, NETWORK, NETWORK_REGIONAL. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + HIERARCHY (69902869): + No description available. + NETWORK (413984270): + No description available. + NETWORK_REGIONAL (190804272): + No description available. + UNSPECIFIED (526786327): + No description available. 
+ """ + UNDEFINED_TYPE = 0 + HIERARCHY = 69902869 + NETWORK = 413984270 + NETWORK_REGIONAL = 190804272 + UNSPECIFIED = 526786327 + + display_name: str = proto.Field( + proto.STRING, + number=4473832, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + rules: MutableSequence['FirewallPolicyRule'] = proto.RepeatedField( + proto.MESSAGE, + number=108873975, + message='FirewallPolicyRule', + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class RegionSetLabelsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + label_fingerprint (str): + The fingerprint of the previous set of labels + for this resource, used to detect conflicts. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash in order to update + or change labels. Make a get() request to the + resource to get the latest fingerprint. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + The labels to set for this resource. + """ + + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + + +class RegionSetPolicyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bindings (MutableSequence[google.cloud.compute_v1.types.Binding]): + Flatten Policy to create a backwacd + compatible wire-format. Deprecated. Use 'policy' + to specify bindings. + etag (str): + Flatten Policy to create a backward + compatible wire-format. Deprecated. Use 'policy' + to specify the etag. 
+ + This field is a member of `oneof`_ ``_etag``. + policy (google.cloud.compute_v1.types.Policy): + REQUIRED: The complete policy to be applied + to the 'resource'. The size of the policy is + limited to a few 10s of KB. An empty policy is + in general a valid policy but certain services + (like Projects) might reject them. + + This field is a member of `oneof`_ ``_policy``. + """ + + bindings: MutableSequence['Binding'] = proto.RepeatedField( + proto.MESSAGE, + number=403251854, + message='Binding', + ) + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + policy: 'Policy' = proto.Field( + proto.MESSAGE, + number=91071794, + optional=True, + message='Policy', + ) + + +class RegionTargetHttpsProxiesSetSslCertificatesRequest(proto.Message): + r""" + + Attributes: + ssl_certificates (MutableSequence[str]): + New set of SslCertificate resources to + associate with this TargetHttpsProxy resource. + """ + + ssl_certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=366006543, + ) + + +class RegionUrlMapsValidateRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + resource (google.cloud.compute_v1.types.UrlMap): + Content of the UrlMap to be validated. + + This field is a member of `oneof`_ ``_resource``. + """ + + resource: 'UrlMap' = proto.Field( + proto.MESSAGE, + number=195806222, + optional=True, + message='UrlMap', + ) + + +class RemoveAssociationFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.RemoveAssociation. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + name (str): + Name for the attachment that will be removed. + + This field is a member of `oneof`_ ``_name``. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class RemoveAssociationNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + NetworkFirewallPolicies.RemoveAssociation. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + name (str): + Name for the attachment that will be removed. + + This field is a member of `oneof`_ ``_name``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class RemoveAssociationRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.RemoveAssociation. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + name (str): + Name for the association that will be + removed. + + This field is a member of `oneof`_ ``_name``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class RemoveHealthCheckTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.RemoveHealthCheck. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_pool (str): + Name of the target pool to remove health + checks from. 
+ target_pools_remove_health_check_request_resource (google.cloud.compute_v1.types.TargetPoolsRemoveHealthCheckRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + target_pools_remove_health_check_request_resource: 'TargetPoolsRemoveHealthCheckRequest' = proto.Field( + proto.MESSAGE, + number=304985011, + message='TargetPoolsRemoveHealthCheckRequest', + ) + + +class RemoveInstanceTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.RemoveInstance. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_pool (str): + Name of the TargetPool resource to remove + instances from. 
+ target_pools_remove_instance_request_resource (google.cloud.compute_v1.types.TargetPoolsRemoveInstanceRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + target_pools_remove_instance_request_resource: 'TargetPoolsRemoveInstanceRequest' = proto.Field( + proto.MESSAGE, + number=29548547, + message='TargetPoolsRemoveInstanceRequest', + ) + + +class RemoveInstancesInstanceGroupRequest(proto.Message): + r"""A request message for InstanceGroups.RemoveInstances. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group (str): + The name of the instance group where the + specified instances will be removed. + instance_groups_remove_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsRemoveInstancesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the instance group + is located. + """ + + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + instance_groups_remove_instances_request_resource: 'InstanceGroupsRemoveInstancesRequest' = proto.Field( + proto.MESSAGE, + number=390981817, + message='InstanceGroupsRemoveInstancesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class RemovePeeringNetworkRequest(proto.Message): + r"""A request message for Networks.RemovePeering. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Name of the network resource to remove + peering from. + networks_remove_peering_request_resource (google.cloud.compute_v1.types.NetworksRemovePeeringRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + networks_remove_peering_request_resource: 'NetworksRemovePeeringRequest' = proto.Field( + proto.MESSAGE, + number=421162494, + message='NetworksRemovePeeringRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class RemoveResourcePoliciesDiskRequest(proto.Message): + r"""A request message for Disks.RemoveResourcePolicies. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The disk name for this request. + disks_remove_resource_policies_request_resource (google.cloud.compute_v1.types.DisksRemoveResourcePoliciesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + disks_remove_resource_policies_request_resource: 'DisksRemoveResourcePoliciesRequest' = proto.Field( + proto.MESSAGE, + number=436756718, + message='DisksRemoveResourcePoliciesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class RemoveResourcePoliciesInstanceRequest(proto.Message): + r"""A request message for Instances.RemoveResourcePolicies. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + The instance name for this request. + instances_remove_resource_policies_request_resource (google.cloud.compute_v1.types.InstancesRemoveResourcePoliciesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_remove_resource_policies_request_resource: 'InstancesRemoveResourcePoliciesRequest' = proto.Field( + proto.MESSAGE, + number=49229558, + message='InstancesRemoveResourcePoliciesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class RemoveResourcePoliciesRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.RemoveResourcePolicies. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The disk name for this request. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_disks_remove_resource_policies_request_resource (google.cloud.compute_v1.types.RegionDisksRemoveResourcePoliciesRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_disks_remove_resource_policies_request_resource: 'RegionDisksRemoveResourcePoliciesRequest' = proto.Field( + proto.MESSAGE, + number=8741283, + message='RegionDisksRemoveResourcePoliciesRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class RemoveRuleFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.RemoveRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + priority (int): + The priority of the rule to remove from the + firewall policy. + + This field is a member of `oneof`_ ``_priority``. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class RemoveRuleNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.RemoveRule. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + priority (int): + The priority of the rule to remove from the + firewall policy. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class RemoveRuleRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.RemoveRule. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + priority (int): + The priority of the rule to remove from the + firewall policy. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class RemoveRuleSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.RemoveRule. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + priority (int): + The priority of the rule to remove from the + security policy. + + This field is a member of `oneof`_ ``_priority``. + project (str): + Project ID for this request. + security_policy (str): + Name of the security policy to update. + """ + + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + ) + + +class RequestMirrorPolicy(proto.Message): + r"""A policy that specifies how requests intended for the route's + backends are shadowed to a separate mirrored backend service. + The load balancer doesn't wait for responses from the shadow + service. Before sending traffic to the shadow service, the host + or authority header is suffixed with -shadow. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + The full or partial URL to the BackendService + resource being mirrored to. The backend service + configured for a mirroring policy must reference + backends that are of the same type as the + original backend service matched in the URL map. 
+ Serverless NEG backends are not currently + supported as a mirrored backend service. + + This field is a member of `oneof`_ ``_backend_service``. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + optional=True, + ) + + +class Reservation(proto.Message): + r"""Represents a reservation resource. A reservation ensures that + capacity is held in a specific zone even if the reserved VMs are + not running. For more information, read Reserving zonal + resources. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + commitment (str): + [Output Only] Full or partial URL to a parent commitment. + This field displays for reservations that are tied to a + commitment. + + This field is a member of `oneof`_ ``_commitment``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#reservations for reservations. + + This field is a member of `oneof`_ ``_kind``. + name (str): + The name of the resource, provided by the client when + initially creating the resource. The resource name must be + 1-63 characters long, and comply with RFC1035. Specifically, + the name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. 
+ + This field is a member of `oneof`_ ``_name``. + resource_policies (MutableMapping[str, str]): + Resource policies to be added to this + reservation. The key is defined by user, and the + value is resource policy url. This is to define + placement policy with reservation. + resource_status (google.cloud.compute_v1.types.AllocationResourceStatus): + [Output Only] Status information for Reservation resource. + + This field is a member of `oneof`_ ``_resource_status``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + self_link (str): + [Output Only] Server-defined fully-qualified URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. + share_settings (google.cloud.compute_v1.types.ShareSettings): + Specify share-settings to create a shared + reservation. This property is optional. For more + information about the syntax and options for + this field and its subfields, see the guide for + creating a shared reservation. + + This field is a member of `oneof`_ ``_share_settings``. + specific_reservation (google.cloud.compute_v1.types.AllocationSpecificSKUReservation): + Reservation for instances with specific + machine shapes. + + This field is a member of `oneof`_ ``_specific_reservation``. + specific_reservation_required (bool): + Indicates whether the reservation can be + consumed by VMs with affinity for "any" + reservation. If the field is set, then only VMs + that target the reservation by name can consume + from this reservation. + + This field is a member of `oneof`_ ``_specific_reservation_required``. + status (str): + [Output Only] The status of the reservation. Check the + Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + zone (str): + Zone in which the reservation resides. A zone + must be provided if the reservation is created + within a commitment. + + This field is a member of `oneof`_ ``_zone``. 
+ """ + class Status(proto.Enum): + r"""[Output Only] The status of the reservation. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + Resources are being allocated for the + reservation. + DELETING (528602024): + Reservation is currently being deleted. + INVALID (530283991): + No description available. + READY (77848963): + Reservation has allocated all its resources. + UPDATING (494614342): + Reservation is currently being resized. + """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + INVALID = 530283991 + READY = 77848963 + UPDATING = 494614342 + + commitment: str = proto.Field( + proto.STRING, + number=482134805, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + resource_policies: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=22220385, + ) + resource_status: 'AllocationResourceStatus' = proto.Field( + proto.MESSAGE, + number=249429315, + optional=True, + message='AllocationResourceStatus', + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + share_settings: 'ShareSettings' = proto.Field( + proto.MESSAGE, + number=266668163, + optional=True, + message='ShareSettings', + ) + specific_reservation: 'AllocationSpecificSKUReservation' = proto.Field( + proto.MESSAGE, + number=404901951, + optional=True, + message='AllocationSpecificSKUReservation', + ) + specific_reservation_required: bool = proto.Field( + 
proto.BOOL, + number=226550687, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class ReservationAffinity(proto.Message): + r"""Specifies the reservations that this instance can consume + from. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consume_reservation_type (str): + Specifies the type of reservation from which this instance + can consume resources: ANY_RESERVATION (default), + SPECIFIC_RESERVATION, or NO_RESERVATION. See Consuming + reserved instances for examples. Check the + ConsumeReservationType enum for the list of possible values. + + This field is a member of `oneof`_ ``_consume_reservation_type``. + key (str): + Corresponds to the label key of a reservation resource. To + target a SPECIFIC_RESERVATION by name, specify + googleapis.com/reservation-name as the key and specify the + name of your reservation as its value. + + This field is a member of `oneof`_ ``_key``. + values (MutableSequence[str]): + Corresponds to the label values of a + reservation resource. This can be either a name + to a reservation in the same project or + "projects/different-project/reservations/some-reservation-name" + to target a shared reservation in the same zone + but in a different project. + """ + class ConsumeReservationType(proto.Enum): + r"""Specifies the type of reservation from which this instance can + consume resources: ANY_RESERVATION (default), SPECIFIC_RESERVATION, + or NO_RESERVATION. See Consuming reserved instances for examples. + + Values: + UNDEFINED_CONSUME_RESERVATION_TYPE (0): + A value indicating that the enum field is not + set. + ANY_RESERVATION (200008121): + Consume any allocation available. + NO_RESERVATION (169322030): + Do not consume from any allocated capacity. 
+ SPECIFIC_RESERVATION (229889055): + Must consume from a specific reservation. + Must specify key value fields for specifying the + reservations. + UNSPECIFIED (526786327): + No description available. + """ + UNDEFINED_CONSUME_RESERVATION_TYPE = 0 + ANY_RESERVATION = 200008121 + NO_RESERVATION = 169322030 + SPECIFIC_RESERVATION = 229889055 + UNSPECIFIED = 526786327 + + consume_reservation_type: str = proto.Field( + proto.STRING, + number=300736944, + optional=True, + ) + key: str = proto.Field( + proto.STRING, + number=106079, + optional=True, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=249928994, + ) + + +class ReservationAggregatedList(proto.Message): + r"""Contains a list of reservations. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.ReservationsScopedList]): + A list of Allocation resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'ReservationsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='ReservationsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ReservationList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Reservation]): + [Output Only] A list of Allocation resources. + kind (str): + [Output Only] Type of resource.Always + compute#reservationsList for listsof reservations + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Reservation'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Reservation', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ReservationsResizeRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + specific_sku_count (int): + Number of allocated resources can be resized + with minimum = 1 and maximum = 1000. + + This field is a member of `oneof`_ ``_specific_sku_count``. + """ + + specific_sku_count: int = proto.Field( + proto.INT64, + number=13890720, + optional=True, + ) + + +class ReservationsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + reservations (MutableSequence[google.cloud.compute_v1.types.Reservation]): + A list of reservations contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of reservations when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + reservations: MutableSequence['Reservation'] = proto.RepeatedField( + proto.MESSAGE, + number=399717927, + message='Reservation', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ResetInstanceRequest(proto.Message): + r"""A request message for Instances.Reset. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ResizeDiskRequest(proto.Message): + r"""A request message for Disks.Resize. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The name of the persistent disk. + disks_resize_request_resource (google.cloud.compute_v1.types.DisksResizeRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + disks_resize_request_resource: 'DisksResizeRequest' = proto.Field( + proto.MESSAGE, + number=78307616, + message='DisksResizeRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ResizeInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.Resize. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. 
+ project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + size (int): + The number of running instances that the + managed instance group should maintain at any + given time. The group automatically adds or + removes instances to maintain the number of + instances specified by this parameter. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + size: int = proto.Field( + proto.INT32, + number=3530753, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ResizeRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.Resize. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + Name of the regional persistent disk. + project (str): + The project ID for this request. + region (str): + Name of the region for this request. 
+ region_disks_resize_request_resource (google.cloud.compute_v1.types.RegionDisksResizeRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_disks_resize_request_resource: 'RegionDisksResizeRequest' = proto.Field( + proto.MESSAGE, + number=446633237, + message='RegionDisksResizeRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class ResizeRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.Resize. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + Name of the managed instance group. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + size (int): + Number of instances that should exist in this + instance group manager. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + size: int = proto.Field( + proto.INT32, + number=3530753, + ) + + +class ResizeReservationRequest(proto.Message): + r"""A request message for Reservations.Resize. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + reservation (str): + Name of the reservation to update. + reservations_resize_request_resource (google.cloud.compute_v1.types.ReservationsResizeRequest): + The body resource for this request + zone (str): + Name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + reservation: str = proto.Field( + proto.STRING, + number=47530956, + ) + reservations_resize_request_resource: 'ReservationsResizeRequest' = proto.Field( + proto.MESSAGE, + number=389262801, + message='ReservationsResizeRequest', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ResourceCommitment(proto.Message): + r"""Commitment for a particular resource (a Commitment is + composed of one or more of these). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_type (str): + Name of the accelerator type resource. + Applicable only when the type is ACCELERATOR. + + This field is a member of `oneof`_ ``_accelerator_type``. + amount (int): + The amount of the resource purchased (in a + type-dependent unit, such as bytes). For vCPUs, + this can just be an integer. For memory, this + must be provided in MB. Memory must be a + multiple of 256 MB, with up to 6.5GB of memory + per every vCPU. + + This field is a member of `oneof`_ ``_amount``. 
+ type_ (str): + Type of resource for which this commitment applies. Possible + values are VCPU, MEMORY, LOCAL_SSD, and ACCELERATOR. Check + the Type enum for the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""Type of resource for which this commitment applies. Possible values + are VCPU, MEMORY, LOCAL_SSD, and ACCELERATOR. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + ACCELERATOR (429815371): + No description available. + LOCAL_SSD (508934896): + No description available. + MEMORY (123056385): + No description available. + UNSPECIFIED (526786327): + No description available. + VCPU (2628978): + No description available. + """ + UNDEFINED_TYPE = 0 + ACCELERATOR = 429815371 + LOCAL_SSD = 508934896 + MEMORY = 123056385 + UNSPECIFIED = 526786327 + VCPU = 2628978 + + accelerator_type: str = proto.Field( + proto.STRING, + number=138031246, + optional=True, + ) + amount: int = proto.Field( + proto.INT64, + number=196759640, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class ResourceGroupReference(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + group (str): + A URI referencing one of the instance groups + or network endpoint groups listed in the backend + service. + + This field is a member of `oneof`_ ``_group``. + """ + + group: str = proto.Field( + proto.STRING, + number=98629247, + optional=True, + ) + + +class ResourcePoliciesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + resource_policies (MutableSequence[google.cloud.compute_v1.types.ResourcePolicy]): + A list of resourcePolicies contained in this + scope. 
+ warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of resourcePolicies when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + resource_policies: MutableSequence['ResourcePolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=22220385, + message='ResourcePolicy', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ResourcePolicy(proto.Message): + r"""Represents a Resource Policy resource. You can use resource + policies to schedule actions for some Compute Engine resources. + For example, you can use them to schedule persistent disk + snapshots. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + + This field is a member of `oneof`_ ``_description``. + disk_consistency_group_policy (google.cloud.compute_v1.types.ResourcePolicyDiskConsistencyGroupPolicy): + Resource policy for disk consistency groups. + + This field is a member of `oneof`_ ``_disk_consistency_group_policy``. + group_placement_policy (google.cloud.compute_v1.types.ResourcePolicyGroupPlacementPolicy): + Resource policy for instances for placement + configuration. + + This field is a member of `oneof`_ ``_group_placement_policy``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + instance_schedule_policy (google.cloud.compute_v1.types.ResourcePolicyInstanceSchedulePolicy): + Resource policy for scheduling instance + operations. + + This field is a member of `oneof`_ ``_instance_schedule_policy``. + kind (str): + [Output Only] Type of the resource. 
Always + compute#resource_policies for resource policies. + + This field is a member of `oneof`_ ``_kind``. + name (str): + The name of the resource, provided by the client when + initially creating the resource. The resource name must be + 1-63 characters long, and comply with RFC1035. Specifically, + the name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + region (str): + + This field is a member of `oneof`_ ``_region``. + resource_status (google.cloud.compute_v1.types.ResourcePolicyResourceStatus): + [Output Only] The system status of the resource policy. + + This field is a member of `oneof`_ ``_resource_status``. + self_link (str): + [Output Only] Server-defined fully-qualified URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. + snapshot_schedule_policy (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicy): + Resource policy for persistent disks for + creating snapshots. + + This field is a member of `oneof`_ ``_snapshot_schedule_policy``. + status (str): + [Output Only] The status of resource policy creation. Check + the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + """ + class Status(proto.Enum): + r"""[Output Only] The status of resource policy creation. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + Resource policy is being created. + DELETING (528602024): + Resource policy is being deleted. + EXPIRED (482489093): + Resource policy is expired and will not run + again. + INVALID (530283991): + No description available. + READY (77848963): + Resource policy is ready to be used. 
+ """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + EXPIRED = 482489093 + INVALID = 530283991 + READY = 77848963 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disk_consistency_group_policy: 'ResourcePolicyDiskConsistencyGroupPolicy' = proto.Field( + proto.MESSAGE, + number=473727515, + optional=True, + message='ResourcePolicyDiskConsistencyGroupPolicy', + ) + group_placement_policy: 'ResourcePolicyGroupPlacementPolicy' = proto.Field( + proto.MESSAGE, + number=10931596, + optional=True, + message='ResourcePolicyGroupPlacementPolicy', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + instance_schedule_policy: 'ResourcePolicyInstanceSchedulePolicy' = proto.Field( + proto.MESSAGE, + number=344877104, + optional=True, + message='ResourcePolicyInstanceSchedulePolicy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + resource_status: 'ResourcePolicyResourceStatus' = proto.Field( + proto.MESSAGE, + number=249429315, + optional=True, + message='ResourcePolicyResourceStatus', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + snapshot_schedule_policy: 'ResourcePolicySnapshotSchedulePolicy' = proto.Field( + proto.MESSAGE, + number=218131295, + optional=True, + message='ResourcePolicySnapshotSchedulePolicy', + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class ResourcePolicyAggregatedList(proto.Message): + r"""Contains a list of resourcePolicies. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.ResourcePoliciesScopedList]): + A list of ResourcePolicy resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'ResourcePoliciesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='ResourcePoliciesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ResourcePolicyDailyCycle(proto.Message): + r"""Time window specified for daily operations. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + days_in_cycle (int): + Defines a schedule with units measured in + days. The value determines how many days pass + between the start of each cycle. + + This field is a member of `oneof`_ ``_days_in_cycle``. + duration (str): + [Output only] A predetermined duration for the window, + automatically chosen to be the smallest possible in the + given scenario. + + This field is a member of `oneof`_ ``_duration``. + start_time (str): + Start time of the window. This must be in UTC + format that resolves to one of 00:00, 04:00, + 08:00, 12:00, 16:00, or 20:00. For example, both + 13:00-5 and 08:00 are valid. + + This field is a member of `oneof`_ ``_start_time``. 
+ """ + + days_in_cycle: int = proto.Field( + proto.INT32, + number=369790004, + optional=True, + ) + duration: str = proto.Field( + proto.STRING, + number=155471252, + optional=True, + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + + +class ResourcePolicyDiskConsistencyGroupPolicy(proto.Message): + r"""Resource policy for disk consistency groups. + """ + + +class ResourcePolicyGroupPlacementPolicy(proto.Message): + r"""A GroupPlacementPolicy specifies resource placement + configuration. It specifies the failure bucket separation as + well as network locality + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + availability_domain_count (int): + The number of availability domains to spread + instances across. If two instances are in + different availability domain, they are not in + the same low latency network. + + This field is a member of `oneof`_ ``_availability_domain_count``. + collocation (str): + Specifies network collocation + Check the Collocation enum for the list of + possible values. + + This field is a member of `oneof`_ ``_collocation``. + vm_count (int): + Number of VMs in this placement group. Google + does not recommend that you use this field + unless you use a compact policy and you want + your policy to work only if it contains this + exact number of VMs. + + This field is a member of `oneof`_ ``_vm_count``. + """ + class Collocation(proto.Enum): + r"""Specifies network collocation + + Values: + UNDEFINED_COLLOCATION (0): + A value indicating that the enum field is not + set. + COLLOCATED (103257554): + No description available. + UNSPECIFIED_COLLOCATION (464308205): + No description available. 
+ """ + UNDEFINED_COLLOCATION = 0 + COLLOCATED = 103257554 + UNSPECIFIED_COLLOCATION = 464308205 + + availability_domain_count: int = proto.Field( + proto.INT32, + number=12453432, + optional=True, + ) + collocation: str = proto.Field( + proto.STRING, + number=511156533, + optional=True, + ) + vm_count: int = proto.Field( + proto.INT32, + number=261463431, + optional=True, + ) + + +class ResourcePolicyHourlyCycle(proto.Message): + r"""Time window specified for hourly operations. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + duration (str): + [Output only] Duration of the time window, automatically + chosen to be smallest possible in the given scenario. + + This field is a member of `oneof`_ ``_duration``. + hours_in_cycle (int): + Defines a schedule with units measured in + hours. The value determines how many hours pass + between the start of each cycle. + + This field is a member of `oneof`_ ``_hours_in_cycle``. + start_time (str): + Time within the window to start the operations. It must be + in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT. + + This field is a member of `oneof`_ ``_start_time``. + """ + + duration: str = proto.Field( + proto.STRING, + number=155471252, + optional=True, + ) + hours_in_cycle: int = proto.Field( + proto.INT32, + number=526763132, + optional=True, + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + + +class ResourcePolicyInstanceSchedulePolicy(proto.Message): + r"""An InstanceSchedulePolicy specifies when and how frequent + certain operations are performed on the instance. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + expiration_time (str): + The expiration time of the schedule. The + timestamp is an RFC3339 string. + + This field is a member of `oneof`_ ``_expiration_time``. 
+ start_time (str): + The start time of the schedule. The timestamp + is an RFC3339 string. + + This field is a member of `oneof`_ ``_start_time``. + time_zone (str): + Specifies the time zone to be used in interpreting + Schedule.schedule. The value of this field must be a time + zone name from the tz database: + https://wikipedia.org/wiki/Tz_database. + + This field is a member of `oneof`_ ``_time_zone``. + vm_start_schedule (google.cloud.compute_v1.types.ResourcePolicyInstanceSchedulePolicySchedule): + Specifies the schedule for starting + instances. + + This field is a member of `oneof`_ ``_vm_start_schedule``. + vm_stop_schedule (google.cloud.compute_v1.types.ResourcePolicyInstanceSchedulePolicySchedule): + Specifies the schedule for stopping + instances. + + This field is a member of `oneof`_ ``_vm_stop_schedule``. + """ + + expiration_time: str = proto.Field( + proto.STRING, + number=230299229, + optional=True, + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + time_zone: str = proto.Field( + proto.STRING, + number=36848094, + optional=True, + ) + vm_start_schedule: 'ResourcePolicyInstanceSchedulePolicySchedule' = proto.Field( + proto.MESSAGE, + number=17762396, + optional=True, + message='ResourcePolicyInstanceSchedulePolicySchedule', + ) + vm_stop_schedule: 'ResourcePolicyInstanceSchedulePolicySchedule' = proto.Field( + proto.MESSAGE, + number=426242732, + optional=True, + message='ResourcePolicyInstanceSchedulePolicySchedule', + ) + + +class ResourcePolicyInstanceSchedulePolicySchedule(proto.Message): + r"""Schedule for an instance operation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + schedule (str): + Specifies the frequency for the operation, + using the unix-cron format. + + This field is a member of `oneof`_ ``_schedule``. 
+ """ + + schedule: str = proto.Field( + proto.STRING, + number=375820951, + optional=True, + ) + + +class ResourcePolicyList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.ResourcePolicy]): + [Output Only] A list of ResourcePolicy resources. + kind (str): + [Output Only] Type of resource.Always + compute#resourcePoliciesList for listsof resourcePolicies + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['ResourcePolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='ResourcePolicy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ResourcePolicyResourceStatus(proto.Message): + r"""Contains output only fields. Use this sub-message for all + output fields set on ResourcePolicy. The internal structure of + this "status" field should mimic the structure of ResourcePolicy + proto specification. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_schedule_policy (google.cloud.compute_v1.types.ResourcePolicyResourceStatusInstanceSchedulePolicyStatus): + [Output Only] Specifies a set of output values reffering to + the instance_schedule_policy system status. This field + should have the same name as corresponding policy field. + + This field is a member of `oneof`_ ``_instance_schedule_policy``. + """ + + instance_schedule_policy: 'ResourcePolicyResourceStatusInstanceSchedulePolicyStatus' = proto.Field( + proto.MESSAGE, + number=344877104, + optional=True, + message='ResourcePolicyResourceStatusInstanceSchedulePolicyStatus', + ) + + +class ResourcePolicyResourceStatusInstanceSchedulePolicyStatus(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + last_run_start_time (str): + [Output Only] The last time the schedule successfully ran. + The timestamp is an RFC3339 string. + + This field is a member of `oneof`_ ``_last_run_start_time``. + next_run_start_time (str): + [Output Only] The next time the schedule is planned to run. + The actual time might be slightly different. The timestamp + is an RFC3339 string. + + This field is a member of `oneof`_ ``_next_run_start_time``. + """ + + last_run_start_time: str = proto.Field( + proto.STRING, + number=303069063, + optional=True, + ) + next_run_start_time: str = proto.Field( + proto.STRING, + number=318642570, + optional=True, + ) + + +class ResourcePolicySnapshotSchedulePolicy(proto.Message): + r"""A snapshot schedule policy specifies when and how frequently + snapshots are to be created for the target disk. Also specifies + how many and how long these scheduled snapshots should be + retained. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + retention_policy (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicyRetentionPolicy): + Retention policy applied to snapshots created + by this resource policy. + + This field is a member of `oneof`_ ``_retention_policy``. + schedule (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicySchedule): + A Vm Maintenance Policy specifies what kind + of infrastructure maintenance we are allowed to + perform on this VM and when. Schedule that is + applied to disks covered by this policy. + + This field is a member of `oneof`_ ``_schedule``. + snapshot_properties (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicySnapshotProperties): + Properties with which snapshots are created + such as labels, encryption keys. + + This field is a member of `oneof`_ ``_snapshot_properties``. 
+ """ + + retention_policy: 'ResourcePolicySnapshotSchedulePolicyRetentionPolicy' = proto.Field( + proto.MESSAGE, + number=68625779, + optional=True, + message='ResourcePolicySnapshotSchedulePolicyRetentionPolicy', + ) + schedule: 'ResourcePolicySnapshotSchedulePolicySchedule' = proto.Field( + proto.MESSAGE, + number=375820951, + optional=True, + message='ResourcePolicySnapshotSchedulePolicySchedule', + ) + snapshot_properties: 'ResourcePolicySnapshotSchedulePolicySnapshotProperties' = proto.Field( + proto.MESSAGE, + number=185371278, + optional=True, + message='ResourcePolicySnapshotSchedulePolicySnapshotProperties', + ) + + +class ResourcePolicySnapshotSchedulePolicyRetentionPolicy(proto.Message): + r"""Policy for retention of scheduled snapshots. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_retention_days (int): + Maximum age of the snapshot that is allowed + to be kept. + + This field is a member of `oneof`_ ``_max_retention_days``. + on_source_disk_delete (str): + Specifies the behavior to apply to scheduled + snapshots when the source disk is deleted. Check + the OnSourceDiskDelete enum for the list of + possible values. + + This field is a member of `oneof`_ ``_on_source_disk_delete``. + """ + class OnSourceDiskDelete(proto.Enum): + r"""Specifies the behavior to apply to scheduled snapshots when + the source disk is deleted. + + Values: + UNDEFINED_ON_SOURCE_DISK_DELETE (0): + A value indicating that the enum field is not + set. + APPLY_RETENTION_POLICY (535071332): + No description available. + KEEP_AUTO_SNAPSHOTS (258925689): + No description available. + UNSPECIFIED_ON_SOURCE_DISK_DELETE (239140769): + No description available. 
+ """ + UNDEFINED_ON_SOURCE_DISK_DELETE = 0 + APPLY_RETENTION_POLICY = 535071332 + KEEP_AUTO_SNAPSHOTS = 258925689 + UNSPECIFIED_ON_SOURCE_DISK_DELETE = 239140769 + + max_retention_days: int = proto.Field( + proto.INT32, + number=324296979, + optional=True, + ) + on_source_disk_delete: str = proto.Field( + proto.STRING, + number=321955529, + optional=True, + ) + + +class ResourcePolicySnapshotSchedulePolicySchedule(proto.Message): + r"""A schedule for disks where the schedueled operations are + performed. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + daily_schedule (google.cloud.compute_v1.types.ResourcePolicyDailyCycle): + + This field is a member of `oneof`_ ``_daily_schedule``. + hourly_schedule (google.cloud.compute_v1.types.ResourcePolicyHourlyCycle): + + This field is a member of `oneof`_ ``_hourly_schedule``. + weekly_schedule (google.cloud.compute_v1.types.ResourcePolicyWeeklyCycle): + + This field is a member of `oneof`_ ``_weekly_schedule``. + """ + + daily_schedule: 'ResourcePolicyDailyCycle' = proto.Field( + proto.MESSAGE, + number=86159869, + optional=True, + message='ResourcePolicyDailyCycle', + ) + hourly_schedule: 'ResourcePolicyHourlyCycle' = proto.Field( + proto.MESSAGE, + number=38328485, + optional=True, + message='ResourcePolicyHourlyCycle', + ) + weekly_schedule: 'ResourcePolicyWeeklyCycle' = proto.Field( + proto.MESSAGE, + number=359548053, + optional=True, + message='ResourcePolicyWeeklyCycle', + ) + + +class ResourcePolicySnapshotSchedulePolicySnapshotProperties(proto.Message): + r"""Specified snapshot properties for scheduled snapshots created + by this policy. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + chain_name (str): + Chain name that the snapshot is created in. + + This field is a member of `oneof`_ ``_chain_name``. 
+ guest_flush (bool): + Indication to perform a 'guest aware' + snapshot. + + This field is a member of `oneof`_ ``_guest_flush``. + labels (MutableMapping[str, str]): + Labels to apply to scheduled snapshots. These + can be later modified by the setLabels method. + Label values may be empty. + storage_locations (MutableSequence[str]): + Cloud Storage bucket storage location of the + auto snapshot (regional or multi-regional). + """ + + chain_name: str = proto.Field( + proto.STRING, + number=68644169, + optional=True, + ) + guest_flush: bool = proto.Field( + proto.BOOL, + number=385550813, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + storage_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=328005274, + ) + + +class ResourcePolicyWeeklyCycle(proto.Message): + r"""Time window specified for weekly operations. + + Attributes: + day_of_weeks (MutableSequence[google.cloud.compute_v1.types.ResourcePolicyWeeklyCycleDayOfWeek]): + Up to 7 intervals/windows, one for each day + of the week. + """ + + day_of_weeks: MutableSequence['ResourcePolicyWeeklyCycleDayOfWeek'] = proto.RepeatedField( + proto.MESSAGE, + number=257871834, + message='ResourcePolicyWeeklyCycleDayOfWeek', + ) + + +class ResourcePolicyWeeklyCycleDayOfWeek(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + day (str): + Defines a schedule that runs on specific days + of the week. Specify one or more days. The + following options are available: MONDAY, + TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, + SUNDAY. Check the Day enum for the list of + possible values. + + This field is a member of `oneof`_ ``_day``. + duration (str): + [Output only] Duration of the time window, automatically + chosen to be smallest possible in the given scenario. + + This field is a member of `oneof`_ ``_duration``. 
+ start_time (str): + Time within the window to start the operations. It must be + in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT. + + This field is a member of `oneof`_ ``_start_time``. + """ + class Day(proto.Enum): + r"""Defines a schedule that runs on specific days of the week. + Specify one or more days. The following options are available: + MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. + + Values: + UNDEFINED_DAY (0): + A value indicating that the enum field is not + set. + FRIDAY (471398751): + No description available. + INVALID (530283991): + No description available. + MONDAY (132310288): + No description available. + SATURDAY (279037881): + No description available. + SUNDAY (309626320): + No description available. + THURSDAY (207198682): + No description available. + TUESDAY (277509677): + No description available. + WEDNESDAY (422029110): + No description available. + """ + UNDEFINED_DAY = 0 + FRIDAY = 471398751 + INVALID = 530283991 + MONDAY = 132310288 + SATURDAY = 279037881 + SUNDAY = 309626320 + THURSDAY = 207198682 + TUESDAY = 277509677 + WEDNESDAY = 422029110 + + day: str = proto.Field( + proto.STRING, + number=99228, + optional=True, + ) + duration: str = proto.Field( + proto.STRING, + number=155471252, + optional=True, + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + + +class ResourceStatus(proto.Message): + r"""Contains output only fields. Use this sub-message for actual + values set on Instance attributes as compared to the value + requested by the user (intent) in their instance CRUD calls. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + physical_host (str): + [Output Only] An opaque ID of the host on which the VM is + running. + + This field is a member of `oneof`_ ``_physical_host``. 
+ """ + + physical_host: str = proto.Field( + proto.STRING, + number=464370704, + optional=True, + ) + + +class ResumeInstanceRequest(proto.Message): + r"""A request message for Instances.Resume. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance resource to resume. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class Route(proto.Message): + r"""Represents a Route resource. A route defines a path from VM + instances in the VPC network to a specific destination. This + destination can be inside or outside the VPC network. For more + information, read the Routes overview. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + as_paths (MutableSequence[google.cloud.compute_v1.types.RouteAsPath]): + [Output Only] AS path. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this field when you create the resource. + + This field is a member of `oneof`_ ``_description``. + dest_range (str): + The destination range of outgoing packets + that this route applies to. Both IPv4 and IPv6 + are supported. + + This field is a member of `oneof`_ ``_dest_range``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of this resource. Always compute#routes + for Route resources. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?``. The first character must be + a lowercase letter, and all following characters (except for + the last character) must be a dash, lowercase letter, or + digit. The last character must be a lowercase letter or + digit. + + This field is a member of `oneof`_ ``_name``. + network (str): + Fully-qualified URL of the network that this + route applies to. + + This field is a member of `oneof`_ ``_network``. + next_hop_gateway (str): + The URL to a gateway that should handle + matching packets. 
You can only specify the + internet gateway using a full or partial valid + URL: projects/ + project/global/gateways/default-internet-gateway + + This field is a member of `oneof`_ ``_next_hop_gateway``. + next_hop_hub (str): + [Output Only] The full resource name of the Network + Connectivity Center hub that will handle matching packets. + + This field is a member of `oneof`_ ``_next_hop_hub``. + next_hop_ilb (str): + The URL to a forwarding rule of type + loadBalancingScheme=INTERNAL that should handle + matching packets or the IP address of the + forwarding Rule. For example, the following are + all valid URLs: - 10.128.0.56 - + https://www.googleapis.com/compute/v1/projects/project/regions/region + /forwardingRules/forwardingRule - + regions/region/forwardingRules/forwardingRule + + This field is a member of `oneof`_ ``_next_hop_ilb``. + next_hop_instance (str): + The URL to an instance that should handle + matching packets. You can specify this as a full + or partial URL. For example: + https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/ + + This field is a member of `oneof`_ ``_next_hop_instance``. + next_hop_ip (str): + The network IP address of an instance that + should handle matching packets. Only IPv4 is + supported. + + This field is a member of `oneof`_ ``_next_hop_ip``. + next_hop_network (str): + The URL of the local network if it should + handle matching packets. + + This field is a member of `oneof`_ ``_next_hop_network``. + next_hop_peering (str): + [Output Only] The network peering name that should handle + matching packets, which should conform to RFC1035. + + This field is a member of `oneof`_ ``_next_hop_peering``. + next_hop_vpn_tunnel (str): + The URL to a VpnTunnel that should handle + matching packets. + + This field is a member of `oneof`_ ``_next_hop_vpn_tunnel``. + priority (int): + The priority of this route. 
Priority is used to break ties + in cases where there is more than one matching route of + equal prefix length. In cases where multiple routes have + equal prefix length, the one with the lowest-numbered + priority value wins. The default value is ``1000``. The + priority value must be from ``0`` to ``65535``, inclusive. + + This field is a member of `oneof`_ ``_priority``. + route_status (str): + [Output only] The status of the route. Check the RouteStatus + enum for the list of possible values. + + This field is a member of `oneof`_ ``_route_status``. + route_type (str): + [Output Only] The type of this route, which can be one of + the following values: - 'TRANSIT' for a transit route that + this router learned from another Cloud Router and will + readvertise to one of its BGP peers - 'SUBNET' for a route + from a subnet of the VPC - 'BGP' for a route learned from a + BGP peer of this router - 'STATIC' for a static route Check + the RouteType enum for the list of possible values. + + This field is a member of `oneof`_ ``_route_type``. + self_link (str): + [Output Only] Server-defined fully-qualified URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. + tags (MutableSequence[str]): + A list of instance tags to which this route + applies. + warnings (MutableSequence[google.cloud.compute_v1.types.Warnings]): + [Output Only] If potential misconfigurations are detected + for this route, this field will be populated with warning + messages. + """ + class RouteStatus(proto.Enum): + r"""[Output only] The status of the route. + + Values: + UNDEFINED_ROUTE_STATUS (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + This route is processed and active. + DROPPED (496235424): + The route is dropped due to the VPC exceeding + the dynamic route limit. For dynamic route + limit, please refer to the Learned route example + INACTIVE (270421099): + This route is processed but inactive due to + failure from the backend. 
The backend may have + rejected the route + PENDING (35394935): + This route is being processed internally. The + status will change once processed. + """ + UNDEFINED_ROUTE_STATUS = 0 + ACTIVE = 314733318 + DROPPED = 496235424 + INACTIVE = 270421099 + PENDING = 35394935 + + class RouteType(proto.Enum): + r"""[Output Only] The type of this route, which can be one of the + following values: - 'TRANSIT' for a transit route that this router + learned from another Cloud Router and will readvertise to one of its + BGP peers - 'SUBNET' for a route from a subnet of the VPC - 'BGP' + for a route learned from a BGP peer of this router - 'STATIC' for a + static route + + Values: + UNDEFINED_ROUTE_TYPE (0): + A value indicating that the enum field is not + set. + BGP (65707): + No description available. + STATIC (308331118): + No description available. + SUBNET (309278557): + No description available. + TRANSIT (187793843): + No description available. + """ + UNDEFINED_ROUTE_TYPE = 0 + BGP = 65707 + STATIC = 308331118 + SUBNET = 309278557 + TRANSIT = 187793843 + + as_paths: MutableSequence['RouteAsPath'] = proto.RepeatedField( + proto.MESSAGE, + number=137568929, + message='RouteAsPath', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + dest_range: str = proto.Field( + proto.STRING, + number=381327712, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + next_hop_gateway: str = proto.Field( + proto.STRING, + number=377175298, + optional=True, + ) + next_hop_hub: str = proto.Field( + proto.STRING, + number=198679219, + optional=True, + ) + 
next_hop_ilb: str = proto.Field( + proto.STRING, + number=198679901, + optional=True, + ) + next_hop_instance: str = proto.Field( + proto.STRING, + number=393508247, + optional=True, + ) + next_hop_ip: str = proto.Field( + proto.STRING, + number=110319529, + optional=True, + ) + next_hop_network: str = proto.Field( + proto.STRING, + number=262295788, + optional=True, + ) + next_hop_peering: str = proto.Field( + proto.STRING, + number=412682750, + optional=True, + ) + next_hop_vpn_tunnel: str = proto.Field( + proto.STRING, + number=519844501, + optional=True, + ) + priority: int = proto.Field( + proto.UINT32, + number=445151652, + optional=True, + ) + route_status: str = proto.Field( + proto.STRING, + number=418162344, + optional=True, + ) + route_type: str = proto.Field( + proto.STRING, + number=375888752, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3552281, + ) + warnings: MutableSequence['Warnings'] = proto.RepeatedField( + proto.MESSAGE, + number=498091095, + message='Warnings', + ) + + +class RouteAsPath(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + as_lists (MutableSequence[int]): + [Output Only] The AS numbers of the AS Path. + path_segment_type (str): + [Output Only] The type of the AS Path, which can be one of + the following values: - 'AS_SET': unordered set of + autonomous systems that the route in has traversed - + 'AS_SEQUENCE': ordered set of autonomous systems that the + route has traversed - 'AS_CONFED_SEQUENCE': ordered set of + Member Autonomous Systems in the local confederation that + the route has traversed - 'AS_CONFED_SET': unordered set of + Member Autonomous Systems in the local confederation that + the route has traversed Check the PathSegmentType enum for + the list of possible values. 
+ + This field is a member of `oneof`_ ``_path_segment_type``. + """ + class PathSegmentType(proto.Enum): + r"""[Output Only] The type of the AS Path, which can be one of the + following values: - 'AS_SET': unordered set of autonomous systems + that the route in has traversed - 'AS_SEQUENCE': ordered set of + autonomous systems that the route has traversed - + 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in + the local confederation that the route has traversed - + 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the + local confederation that the route has traversed + + Values: + UNDEFINED_PATH_SEGMENT_TYPE (0): + A value indicating that the enum field is not + set. + AS_CONFED_SEQUENCE (222152624): + No description available. + AS_CONFED_SET (374040307): + No description available. + AS_SEQUENCE (106735918): + No description available. + AS_SET (329846453): + No description available. + """ + UNDEFINED_PATH_SEGMENT_TYPE = 0 + AS_CONFED_SEQUENCE = 222152624 + AS_CONFED_SET = 374040307 + AS_SEQUENCE = 106735918 + AS_SET = 329846453 + + as_lists: MutableSequence[int] = proto.RepeatedField( + proto.UINT32, + number=134112584, + ) + path_segment_type: str = proto.Field( + proto.STRING, + number=513464992, + optional=True, + ) + + +class RouteList(proto.Message): + r"""Contains a list of Route resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Route]): + A list of Route resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Route'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Route', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class Router(proto.Message): + r"""Represents a Cloud Router resource. For more information + about Cloud Router, read the Cloud Router overview. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bgp (google.cloud.compute_v1.types.RouterBgp): + BGP information specific to this router. + + This field is a member of `oneof`_ ``_bgp``. + bgp_peers (MutableSequence[google.cloud.compute_v1.types.RouterBgpPeer]): + BGP information that must be configured into + the routing stack to establish BGP peering. This + information must specify the peer ASN and either + the interface name, IP address, or peer IP + address. Please refer to RFC4273. 
+ creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + encrypted_interconnect_router (bool): + Indicates if a router is dedicated for use + with encrypted VLAN attachments + (interconnectAttachments). + + This field is a member of `oneof`_ ``_encrypted_interconnect_router``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + interfaces (MutableSequence[google.cloud.compute_v1.types.RouterInterface]): + Router interfaces. Each interface requires + either one linked resource, (for example, + linkedVpnTunnel), or IP address and IP address + range (for example, ipRange), or both. + kind (str): + [Output Only] Type of resource. Always compute#router for + routers. + + This field is a member of `oneof`_ ``_kind``. + md5_authentication_keys (MutableSequence[google.cloud.compute_v1.types.RouterMd5AuthenticationKey]): + Keys used for MD5 authentication. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + nats (MutableSequence[google.cloud.compute_v1.types.RouterNat]): + A list of NAT services created in this + router. + network (str): + URI of the network to which this router + belongs. 
+ + This field is a member of `oneof`_ ``_network``. + region (str): + [Output Only] URI of the region where the router resides. + You must specify this field as part of the HTTP request URL. + It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + """ + + bgp: 'RouterBgp' = proto.Field( + proto.MESSAGE, + number=97483, + optional=True, + message='RouterBgp', + ) + bgp_peers: MutableSequence['RouterBgpPeer'] = proto.RepeatedField( + proto.MESSAGE, + number=452695773, + message='RouterBgpPeer', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + encrypted_interconnect_router: bool = proto.Field( + proto.BOOL, + number=297996575, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + interfaces: MutableSequence['RouterInterface'] = proto.RepeatedField( + proto.MESSAGE, + number=12073562, + message='RouterInterface', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + md5_authentication_keys: MutableSequence['RouterMd5AuthenticationKey'] = proto.RepeatedField( + proto.MESSAGE, + number=71063322, + message='RouterMd5AuthenticationKey', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + nats: MutableSequence['RouterNat'] = proto.RepeatedField( + proto.MESSAGE, + number=3373938, + message='RouterNat', + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + + +class RouterAdvertisedIpRange(proto.Message): + r"""Description-tagged IP 
ranges for the router to advertise. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + User-specified description for the IP range. + + This field is a member of `oneof`_ ``_description``. + range_ (str): + The IP range to advertise. The value must be + a CIDR-formatted string. + + This field is a member of `oneof`_ ``_range``. + """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + range_: str = proto.Field( + proto.STRING, + number=108280125, + optional=True, + ) + + +class RouterAggregatedList(proto.Message): + r"""Contains a list of routers. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.RoutersScopedList]): + A list of Router resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'RoutersScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='RoutersScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RouterBgp(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + advertise_mode (str): + User-specified flag to indicate which mode to + use for advertisement. The options are DEFAULT + or CUSTOM. Check the AdvertiseMode enum for the + list of possible values. + + This field is a member of `oneof`_ ``_advertise_mode``. + advertised_groups (MutableSequence[str]): + User-specified list of prefix groups to advertise in custom + mode. This field can only be populated if advertise_mode is + CUSTOM and is advertised to all peers of the router. These + groups will be advertised in addition to any specified + prefixes. Leave this field blank to advertise no custom + groups. Check the AdvertisedGroups enum for the list of + possible values. + advertised_ip_ranges (MutableSequence[google.cloud.compute_v1.types.RouterAdvertisedIpRange]): + User-specified list of individual IP ranges to advertise in + custom mode. This field can only be populated if + advertise_mode is CUSTOM and is advertised to all peers of + the router. These IP ranges will be advertised in addition + to any specified groups. 
Leave this field blank to advertise + no custom IP ranges. + asn (int): + Local BGP Autonomous System Number (ASN). + Must be an RFC6996 private ASN, either 16-bit or + 32-bit. The value will be fixed for this router + resource. All VPN tunnels that link to this + router will have the same local ASN. + + This field is a member of `oneof`_ ``_asn``. + keepalive_interval (int): + The interval in seconds between BGP keepalive + messages that are sent to the peer. Hold time is + three times the interval at which keepalive + messages are sent, and the hold time is the + maximum number of seconds allowed to elapse + between successive keepalive messages that BGP + receives from a peer. BGP will use the smaller + of either the local hold time value or the + peer's hold time value as the hold time for the + BGP connection between the two peers. If set, + this value must be between 20 and 60. The + default is 20. + + This field is a member of `oneof`_ ``_keepalive_interval``. + """ + class AdvertiseMode(proto.Enum): + r"""User-specified flag to indicate which mode to use for + advertisement. The options are DEFAULT or CUSTOM. + + Values: + UNDEFINED_ADVERTISE_MODE (0): + A value indicating that the enum field is not + set. + CUSTOM (388595569): + No description available. + DEFAULT (115302945): + No description available. + """ + UNDEFINED_ADVERTISE_MODE = 0 + CUSTOM = 388595569 + DEFAULT = 115302945 + + class AdvertisedGroups(proto.Enum): + r""" + + Values: + UNDEFINED_ADVERTISED_GROUPS (0): + A value indicating that the enum field is not + set. + ALL_SUBNETS (3622872): + Advertise all available subnets (including + peer VPC subnets). 
+ """ + UNDEFINED_ADVERTISED_GROUPS = 0 + ALL_SUBNETS = 3622872 + + advertise_mode: str = proto.Field( + proto.STRING, + number=312134331, + optional=True, + ) + advertised_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=21065526, + ) + advertised_ip_ranges: MutableSequence['RouterAdvertisedIpRange'] = proto.RepeatedField( + proto.MESSAGE, + number=35449932, + message='RouterAdvertisedIpRange', + ) + asn: int = proto.Field( + proto.UINT32, + number=96892, + optional=True, + ) + keepalive_interval: int = proto.Field( + proto.UINT32, + number=276771516, + optional=True, + ) + + +class RouterBgpPeer(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + advertise_mode (str): + User-specified flag to indicate which mode to + use for advertisement. Check the AdvertiseMode + enum for the list of possible values. + + This field is a member of `oneof`_ ``_advertise_mode``. + advertised_groups (MutableSequence[str]): + User-specified list of prefix groups to advertise in custom + mode, which currently supports the following option: - + ALL_SUBNETS: Advertises all of the router's own VPC subnets. + This excludes any routes learned for subnets that use VPC + Network Peering. Note that this field can only be populated + if advertise_mode is CUSTOM and overrides the list defined + for the router (in the "bgp" message). These groups are + advertised in addition to any specified prefixes. Leave this + field blank to advertise no custom groups. Check the + AdvertisedGroups enum for the list of possible values. + advertised_ip_ranges (MutableSequence[google.cloud.compute_v1.types.RouterAdvertisedIpRange]): + User-specified list of individual IP ranges to advertise in + custom mode. This field can only be populated if + advertise_mode is CUSTOM and overrides the list defined for + the router (in the "bgp" message). 
These IP ranges are + advertised in addition to any specified groups. Leave this + field blank to advertise no custom IP ranges. + advertised_route_priority (int): + The priority of routes advertised to this BGP + peer. Where there is more than one matching + route of maximum length, the routes with the + lowest priority value win. + + This field is a member of `oneof`_ ``_advertised_route_priority``. + bfd (google.cloud.compute_v1.types.RouterBgpPeerBfd): + BFD configuration for the BGP peering. + + This field is a member of `oneof`_ ``_bfd``. + custom_learned_ip_ranges (MutableSequence[google.cloud.compute_v1.types.RouterBgpPeerCustomLearnedIpRange]): + A list of user-defined custom learned route + IP address ranges for a BGP session. + custom_learned_route_priority (int): + The user-defined custom learned route priority for a BGP + session. This value is applied to all custom learned route + ranges for the session. You can choose a value from ``0`` to + ``65335``. If you don't provide a value, Google Cloud + assigns a priority of ``100`` to the ranges. + + This field is a member of `oneof`_ ``_custom_learned_route_priority``. + enable (str): + The status of the BGP peer connection. If set + to FALSE, any active session with the peer is + terminated and all associated routing + information is removed. If set to TRUE, the peer + connection can be established with routing + information. The default is TRUE. Check the + Enable enum for the list of possible values. + + This field is a member of `oneof`_ ``_enable``. + enable_ipv6 (bool): + Enable IPv6 traffic over BGP Peer. If not + specified, it is disabled by default. + + This field is a member of `oneof`_ ``_enable_ipv6``. + interface_name (str): + Name of the interface the BGP peer is + associated with. + + This field is a member of `oneof`_ ``_interface_name``. + ip_address (str): + IP address of the interface inside Google + Cloud Platform. Only IPv4 is supported. 
+ + This field is a member of `oneof`_ ``_ip_address``. + ipv6_nexthop_address (str): + IPv6 address of the interface inside Google + Cloud Platform. + + This field is a member of `oneof`_ ``_ipv6_nexthop_address``. + management_type (str): + [Output Only] The resource that configures and manages this + BGP peer. - MANAGED_BY_USER is the default value and can be + managed by you or other users - MANAGED_BY_ATTACHMENT is a + BGP peer that is configured and managed by Cloud + Interconnect, specifically by an InterconnectAttachment of + type PARTNER. Google automatically creates, updates, and + deletes this type of BGP peer when the PARTNER + InterconnectAttachment is created, updated, or deleted. + Check the ManagementType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_management_type``. + md5_authentication_key_name (str): + Present if MD5 authentication is enabled for the peering. + Must be the name of one of the entries in the + Router.md5_authentication_keys. The field must comply with + RFC1035. + + This field is a member of `oneof`_ ``_md5_authentication_key_name``. + name (str): + Name of this BGP peer. The name must be 1-63 characters + long, and comply with RFC1035. Specifically, the name must + be 1-63 characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + peer_asn (int): + Peer BGP Autonomous System Number (ASN). Each + BGP interface may use a different value. + + This field is a member of `oneof`_ ``_peer_asn``. + peer_ip_address (str): + IP address of the BGP interface outside + Google Cloud Platform. Only IPv4 is supported. + + This field is a member of `oneof`_ ``_peer_ip_address``. 
+ peer_ipv6_nexthop_address (str): + IPv6 address of the BGP interface outside + Google Cloud Platform. + + This field is a member of `oneof`_ ``_peer_ipv6_nexthop_address``. + router_appliance_instance (str): + URI of the VM instance that is used as + third-party router appliances such as Next Gen + Firewalls, Virtual Routers, or Router + Appliances. The VM instance must be located in + zones contained in the same region as this Cloud + Router. The VM instance is the peer side of the + BGP session. + + This field is a member of `oneof`_ ``_router_appliance_instance``. + """ + class AdvertiseMode(proto.Enum): + r"""User-specified flag to indicate which mode to use for + advertisement. + + Values: + UNDEFINED_ADVERTISE_MODE (0): + A value indicating that the enum field is not + set. + CUSTOM (388595569): + No description available. + DEFAULT (115302945): + No description available. + """ + UNDEFINED_ADVERTISE_MODE = 0 + CUSTOM = 388595569 + DEFAULT = 115302945 + + class AdvertisedGroups(proto.Enum): + r""" + + Values: + UNDEFINED_ADVERTISED_GROUPS (0): + A value indicating that the enum field is not + set. + ALL_SUBNETS (3622872): + Advertise all available subnets (including + peer VPC subnets). + """ + UNDEFINED_ADVERTISED_GROUPS = 0 + ALL_SUBNETS = 3622872 + + class Enable(proto.Enum): + r"""The status of the BGP peer connection. If set to FALSE, any + active session with the peer is terminated and all associated + routing information is removed. If set to TRUE, the peer + connection can be established with routing information. The + default is TRUE. + + Values: + UNDEFINED_ENABLE (0): + A value indicating that the enum field is not + set. + FALSE (66658563): + No description available. + TRUE (2583950): + No description available. + """ + UNDEFINED_ENABLE = 0 + FALSE = 66658563 + TRUE = 2583950 + + class ManagementType(proto.Enum): + r"""[Output Only] The resource that configures and manages this BGP + peer. 
- MANAGED_BY_USER is the default value and can be managed by + you or other users - MANAGED_BY_ATTACHMENT is a BGP peer that is + configured and managed by Cloud Interconnect, specifically by an + InterconnectAttachment of type PARTNER. Google automatically + creates, updates, and deletes this type of BGP peer when the PARTNER + InterconnectAttachment is created, updated, or deleted. + + Values: + UNDEFINED_MANAGEMENT_TYPE (0): + A value indicating that the enum field is not + set. + MANAGED_BY_ATTACHMENT (458926411): + The BGP peer is automatically created for + PARTNER type InterconnectAttachment; Google will + automatically create/delete this BGP peer when + the PARTNER InterconnectAttachment is + created/deleted, and Google will update the + ipAddress and peerIpAddress when the PARTNER + InterconnectAttachment is provisioned. This type + of BGP peer cannot be created or deleted, but + can be modified for all fields except for name, + ipAddress and peerIpAddress. + MANAGED_BY_USER (317294067): + Default value, the BGP peer is manually + created and managed by user. 
+ """ + UNDEFINED_MANAGEMENT_TYPE = 0 + MANAGED_BY_ATTACHMENT = 458926411 + MANAGED_BY_USER = 317294067 + + advertise_mode: str = proto.Field( + proto.STRING, + number=312134331, + optional=True, + ) + advertised_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=21065526, + ) + advertised_ip_ranges: MutableSequence['RouterAdvertisedIpRange'] = proto.RepeatedField( + proto.MESSAGE, + number=35449932, + message='RouterAdvertisedIpRange', + ) + advertised_route_priority: int = proto.Field( + proto.UINT32, + number=186486332, + optional=True, + ) + bfd: 'RouterBgpPeerBfd' = proto.Field( + proto.MESSAGE, + number=97440, + optional=True, + message='RouterBgpPeerBfd', + ) + custom_learned_ip_ranges: MutableSequence['RouterBgpPeerCustomLearnedIpRange'] = proto.RepeatedField( + proto.MESSAGE, + number=481363012, + message='RouterBgpPeerCustomLearnedIpRange', + ) + custom_learned_route_priority: int = proto.Field( + proto.INT32, + number=330412356, + optional=True, + ) + enable: str = proto.Field( + proto.STRING, + number=311764355, + optional=True, + ) + enable_ipv6: bool = proto.Field( + proto.BOOL, + number=181467939, + optional=True, + ) + interface_name: str = proto.Field( + proto.STRING, + number=437854673, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=406272220, + optional=True, + ) + ipv6_nexthop_address: str = proto.Field( + proto.STRING, + number=27968211, + optional=True, + ) + management_type: str = proto.Field( + proto.STRING, + number=173703606, + optional=True, + ) + md5_authentication_key_name: str = proto.Field( + proto.STRING, + number=281075345, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + peer_asn: int = proto.Field( + proto.UINT32, + number=69573151, + optional=True, + ) + peer_ip_address: str = proto.Field( + proto.STRING, + number=207735769, + optional=True, + ) + peer_ipv6_nexthop_address: str = proto.Field( + proto.STRING, + 
number=491486608, + optional=True, + ) + router_appliance_instance: str = proto.Field( + proto.STRING, + number=468312989, + optional=True, + ) + + +class RouterBgpPeerBfd(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_receive_interval (int): + The minimum interval, in milliseconds, + between BFD control packets received from the + peer router. The actual value is negotiated + between the two routers and is equal to the + greater of this value and the transmit interval + of the other router. If set, this value must be + between 1000 and 30000. The default is 1000. + + This field is a member of `oneof`_ ``_min_receive_interval``. + min_transmit_interval (int): + The minimum interval, in milliseconds, + between BFD control packets transmitted to the + peer router. The actual value is negotiated + between the two routers and is equal to the + greater of this value and the corresponding + receive interval of the other router. If set, + this value must be between 1000 and 30000. The + default is 1000. + + This field is a member of `oneof`_ ``_min_transmit_interval``. + multiplier (int): + The number of consecutive BFD packets that + must be missed before BFD declares that a peer + is unavailable. If set, the value must be a + value between 5 and 16. The default is 5. + + This field is a member of `oneof`_ ``_multiplier``. + session_initialization_mode (str): + The BFD session initialization mode for this + BGP peer. If set to ACTIVE, the Cloud Router + will initiate the BFD session for this BGP peer. + If set to PASSIVE, the Cloud Router will wait + for the peer router to initiate the BFD session + for this BGP peer. If set to DISABLED, BFD is + disabled for this BGP peer. The default is + DISABLED. Check the SessionInitializationMode + enum for the list of possible values. + + This field is a member of `oneof`_ ``_session_initialization_mode``. 
+ """ + class SessionInitializationMode(proto.Enum): + r"""The BFD session initialization mode for this BGP peer. If set + to ACTIVE, the Cloud Router will initiate the BFD session for + this BGP peer. If set to PASSIVE, the Cloud Router will wait for + the peer router to initiate the BFD session for this BGP peer. + If set to DISABLED, BFD is disabled for this BGP peer. The + default is DISABLED. + + Values: + UNDEFINED_SESSION_INITIALIZATION_MODE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + No description available. + DISABLED (516696700): + No description available. + PASSIVE (462813959): + No description available. + """ + UNDEFINED_SESSION_INITIALIZATION_MODE = 0 + ACTIVE = 314733318 + DISABLED = 516696700 + PASSIVE = 462813959 + + min_receive_interval: int = proto.Field( + proto.UINT32, + number=186981614, + optional=True, + ) + min_transmit_interval: int = proto.Field( + proto.UINT32, + number=523282631, + optional=True, + ) + multiplier: int = proto.Field( + proto.UINT32, + number=191331777, + optional=True, + ) + session_initialization_mode: str = proto.Field( + proto.STRING, + number=105957049, + optional=True, + ) + + +class RouterBgpPeerCustomLearnedIpRange(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + range_ (str): + The custom learned route IP address range. Must be a valid + CIDR-formatted prefix. If an IP address is provided without + a subnet mask, it is interpreted as, for IPv4, a ``/32`` + singular IP address range, and, for IPv6, ``/128``. + + This field is a member of `oneof`_ ``_range``. + """ + + range_: str = proto.Field( + proto.STRING, + number=108280125, + optional=True, + ) + + +class RouterInterface(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_range (str): + IP address and range of the interface. The IP + range must be in the RFC3927 link-local IP + address space. The value must be a + CIDR-formatted string, for example: + 169.254.0.1/30. NOTE: Do not truncate the + address as it represents the IP address of the + interface. + + This field is a member of `oneof`_ ``_ip_range``. + linked_interconnect_attachment (str): + URI of the linked Interconnect attachment. It + must be in the same region as the router. Each + interface can have one linked resource, which + can be a VPN tunnel, an Interconnect attachment, + or a virtual machine instance. + + This field is a member of `oneof`_ ``_linked_interconnect_attachment``. + linked_vpn_tunnel (str): + URI of the linked VPN tunnel, which must be + in the same region as the router. Each interface + can have one linked resource, which can be a VPN + tunnel, an Interconnect attachment, or a virtual + machine instance. + + This field is a member of `oneof`_ ``_linked_vpn_tunnel``. + management_type (str): + [Output Only] The resource that configures and manages this + interface. - MANAGED_BY_USER is the default value and can be + managed directly by users. - MANAGED_BY_ATTACHMENT is an + interface that is configured and managed by Cloud + Interconnect, specifically, by an InterconnectAttachment of + type PARTNER. Google automatically creates, updates, and + deletes this type of interface when the PARTNER + InterconnectAttachment is created, updated, or deleted. + Check the ManagementType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_management_type``. + name (str): + Name of this interface entry. The name must be 1-63 + characters long, and comply with RFC1035. 
Specifically, the + name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + private_ip_address (str): + The regional private internal IP address that + is used to establish BGP sessions to a VM + instance acting as a third-party Router + Appliance, such as a Next Gen Firewall, a + Virtual Router, or an SD-WAN VM. + + This field is a member of `oneof`_ ``_private_ip_address``. + redundant_interface (str): + Name of the interface that will be redundant with the + current interface you are creating. The redundantInterface + must belong to the same Cloud Router as the interface here. + To establish the BGP session to a Router Appliance VM, you + must create two BGP peers. The two BGP peers must be + attached to two separate interfaces that are redundant with + each other. The redundant_interface must be 1-63 characters + long, and comply with RFC1035. Specifically, the + redundant_interface must be 1-63 characters long and match + the regular expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which + means the first character must be a lowercase letter, and + all following characters must be a dash, lowercase letter, + or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_redundant_interface``. + subnetwork (str): + The URI of the subnetwork resource that this + interface belongs to, which must be in the same + region as the Cloud Router. When you establish a + BGP session to a VM instance using this + interface, the VM instance must belong to the + same subnetwork as the subnetwork specified + here. + + This field is a member of `oneof`_ ``_subnetwork``. 
+ """ + class ManagementType(proto.Enum): + r"""[Output Only] The resource that configures and manages this + interface. - MANAGED_BY_USER is the default value and can be managed + directly by users. - MANAGED_BY_ATTACHMENT is an interface that is + configured and managed by Cloud Interconnect, specifically, by an + InterconnectAttachment of type PARTNER. Google automatically + creates, updates, and deletes this type of interface when the + PARTNER InterconnectAttachment is created, updated, or deleted. + + Values: + UNDEFINED_MANAGEMENT_TYPE (0): + A value indicating that the enum field is not + set. + MANAGED_BY_ATTACHMENT (458926411): + The interface is automatically created for + PARTNER type InterconnectAttachment, Google will + automatically create/update/delete this + interface when the PARTNER + InterconnectAttachment is + created/provisioned/deleted. This type of + interface cannot be manually managed by user. + MANAGED_BY_USER (317294067): + Default value, the interface is manually + created and managed by user. + """ + UNDEFINED_MANAGEMENT_TYPE = 0 + MANAGED_BY_ATTACHMENT = 458926411 + MANAGED_BY_USER = 317294067 + + ip_range: str = proto.Field( + proto.STRING, + number=145092645, + optional=True, + ) + linked_interconnect_attachment: str = proto.Field( + proto.STRING, + number=501085518, + optional=True, + ) + linked_vpn_tunnel: str = proto.Field( + proto.STRING, + number=352296953, + optional=True, + ) + management_type: str = proto.Field( + proto.STRING, + number=173703606, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + private_ip_address: str = proto.Field( + proto.STRING, + number=100854040, + optional=True, + ) + redundant_interface: str = proto.Field( + proto.STRING, + number=523187303, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + optional=True, + ) + + +class RouterList(proto.Message): + r"""Contains a list of Router resources. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Router]): + A list of Router resources. + kind (str): + [Output Only] Type of resource. Always compute#router for + routers. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Router'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Router', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class RouterMd5AuthenticationKey(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + [Input only] Value of the key. For patch and update calls, + it can be skipped to copy the value from the previous + configuration. This is allowed if the key with the same name + existed before the operation. Maximum length is 80 + characters. Can only contain printable ASCII characters. + + This field is a member of `oneof`_ ``_key``. + name (str): + Name used to identify the key. Must be unique + within a router. Must be referenced by at least + one bgpPeer. Must comply with RFC1035. + + This field is a member of `oneof`_ ``_name``. + """ + + key: str = proto.Field( + proto.STRING, + number=106079, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + + +class RouterNat(proto.Message): + r"""Represents a Nat resource. It enables the VMs within the + specified subnetworks to access Internet without external IP + addresses. It specifies a list of subnetworks (and the ranges + within) that want to use NAT. Customers can also provide the + external IPs that would be used for NAT. GCP would auto-allocate + ephemeral IPs if no external IPs are provided. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_network_tier (str): + The network tier to use when automatically + reserving IP addresses. Must be one of: PREMIUM, + STANDARD. If not specified, PREMIUM tier will be + used. Check the AutoNetworkTier enum for the + list of possible values. + + This field is a member of `oneof`_ ``_auto_network_tier``. + drain_nat_ips (MutableSequence[str]): + A list of URLs of the IP resources to be + drained. These IPs must be valid static external + IPs that have been assigned to the NAT. These + IPs should be used for updating/patching a NAT + only. 
+ enable_dynamic_port_allocation (bool): + Enable Dynamic Port Allocation. If not + specified, it is disabled by default. If set to + true, - Dynamic Port Allocation will be enabled + on this NAT config. - + enableEndpointIndependentMapping cannot be set + to true. - If minPorts is set, minPortsPerVm + must be set to a power of two greater than or + equal to 32. If minPortsPerVm is not set, a + minimum of 32 ports will be allocated to a VM + from this NAT config. + + This field is a member of `oneof`_ ``_enable_dynamic_port_allocation``. + enable_endpoint_independent_mapping (bool): + + This field is a member of `oneof`_ ``_enable_endpoint_independent_mapping``. + endpoint_types (MutableSequence[str]): + List of NAT-ted endpoint types supported by the Nat Gateway. + If the list is empty, then it will be equivalent to include + ENDPOINT_TYPE_VM Check the EndpointTypes enum for the list + of possible values. + icmp_idle_timeout_sec (int): + Timeout (in seconds) for ICMP connections. + Defaults to 30s if not set. + + This field is a member of `oneof`_ ``_icmp_idle_timeout_sec``. + log_config (google.cloud.compute_v1.types.RouterNatLogConfig): + Configure logging on this NAT. + + This field is a member of `oneof`_ ``_log_config``. + max_ports_per_vm (int): + Maximum number of ports allocated to a VM + from this NAT config when Dynamic Port + Allocation is enabled. If Dynamic Port + Allocation is not enabled, this field has no + effect. If Dynamic Port Allocation is enabled, + and this field is set, it must be set to a power + of two greater than minPortsPerVm, or 64 if + minPortsPerVm is not set. If Dynamic Port + Allocation is enabled and this field is not set, + a maximum of 65536 ports will be allocated to a + VM from this NAT config. + + This field is a member of `oneof`_ ``_max_ports_per_vm``. + min_ports_per_vm (int): + Minimum number of ports allocated to a VM + from this NAT config. If not set, a default + number of ports is allocated to a VM. 
This is + rounded up to the nearest power of 2. For + example, if the value of this field is 50, at + least 64 ports are allocated to a VM. + + This field is a member of `oneof`_ ``_min_ports_per_vm``. + name (str): + Unique name of this Nat service. The name + must be 1-63 characters long and comply with + RFC1035. + + This field is a member of `oneof`_ ``_name``. + nat_ip_allocate_option (str): + Specify the NatIpAllocateOption, which can take one of the + following values: - MANUAL_ONLY: Uses only Nat IP addresses + provided by customers. When there are not enough specified + Nat IPs, the Nat service fails for new VMs. - AUTO_ONLY: Nat + IPs are allocated by Google Cloud Platform; customers can't + specify any Nat IPs. When choosing AUTO_ONLY, then nat_ip + should be empty. Check the NatIpAllocateOption enum for the + list of possible values. + + This field is a member of `oneof`_ ``_nat_ip_allocate_option``. + nat_ips (MutableSequence[str]): + A list of URLs of the IP resources used for + this Nat service. These IP addresses must be + valid static external IP addresses assigned to + the project. + rules (MutableSequence[google.cloud.compute_v1.types.RouterNatRule]): + A list of rules associated with this NAT. + source_subnetwork_ip_ranges_to_nat (str): + Specify the Nat option, which can take one of the following + values: - ALL_SUBNETWORKS_ALL_IP_RANGES: All of the IP + ranges in every Subnetwork are allowed to Nat. - + ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES: All of the primary IP + ranges in every Subnetwork are allowed to Nat. - + LIST_OF_SUBNETWORKS: A list of Subnetworks are allowed to + Nat (specified in the field subnetwork below) The default is + SUBNETWORK_IP_RANGE_TO_NAT_OPTION_UNSPECIFIED. Note that if + this field contains ALL_SUBNETWORKS_ALL_IP_RANGES then there + should not be any other Router.Nat section in any Router for + this network in this region. Check the + SourceSubnetworkIpRangesToNat enum for the list of possible + values. 
+ + This field is a member of `oneof`_ ``_source_subnetwork_ip_ranges_to_nat``. + subnetworks (MutableSequence[google.cloud.compute_v1.types.RouterNatSubnetworkToNat]): + A list of Subnetwork resources whose traffic should be + translated by NAT Gateway. It is used only when + LIST_OF_SUBNETWORKS is selected for the + SubnetworkIpRangeToNatOption above. + tcp_established_idle_timeout_sec (int): + Timeout (in seconds) for TCP established + connections. Defaults to 1200s if not set. + + This field is a member of `oneof`_ ``_tcp_established_idle_timeout_sec``. + tcp_time_wait_timeout_sec (int): + Timeout (in seconds) for TCP connections that are in + TIME_WAIT state. Defaults to 120s if not set. + + This field is a member of `oneof`_ ``_tcp_time_wait_timeout_sec``. + tcp_transitory_idle_timeout_sec (int): + Timeout (in seconds) for TCP transitory + connections. Defaults to 30s if not set. + + This field is a member of `oneof`_ ``_tcp_transitory_idle_timeout_sec``. + udp_idle_timeout_sec (int): + Timeout (in seconds) for UDP connections. + Defaults to 30s if not set. + + This field is a member of `oneof`_ ``_udp_idle_timeout_sec``. + """ + class AutoNetworkTier(proto.Enum): + r"""The network tier to use when automatically reserving IP + addresses. Must be one of: PREMIUM, STANDARD. If not specified, + PREMIUM tier will be used. + + Values: + UNDEFINED_AUTO_NETWORK_TIER (0): + A value indicating that the enum field is not + set. + FIXED_STANDARD (310464328): + Public internet quality with fixed bandwidth. + PREMIUM (399530551): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (484642493): + Public internet quality, only limited support + for other networking products. + STANDARD_OVERRIDES_FIXED_STANDARD (465847234): + (Output only) Temporary tier for FIXED_STANDARD when fixed + standard tier is expired or not configured. 
+ """ + UNDEFINED_AUTO_NETWORK_TIER = 0 + FIXED_STANDARD = 310464328 + PREMIUM = 399530551 + STANDARD = 484642493 + STANDARD_OVERRIDES_FIXED_STANDARD = 465847234 + + class EndpointTypes(proto.Enum): + r""" + + Values: + UNDEFINED_ENDPOINT_TYPES (0): + A value indicating that the enum field is not + set. + ENDPOINT_TYPE_SWG (159344456): + This is used for Secure Web Gateway + endpoints. + ENDPOINT_TYPE_VM (57095474): + This is the default. + """ + UNDEFINED_ENDPOINT_TYPES = 0 + ENDPOINT_TYPE_SWG = 159344456 + ENDPOINT_TYPE_VM = 57095474 + + class NatIpAllocateOption(proto.Enum): + r"""Specify the NatIpAllocateOption, which can take one of the following + values: - MANUAL_ONLY: Uses only Nat IP addresses provided by + customers. When there are not enough specified Nat IPs, the Nat + service fails for new VMs. - AUTO_ONLY: Nat IPs are allocated by + Google Cloud Platform; customers can't specify any Nat IPs. When + choosing AUTO_ONLY, then nat_ip should be empty. + + Values: + UNDEFINED_NAT_IP_ALLOCATE_OPTION (0): + A value indicating that the enum field is not + set. + AUTO_ONLY (182333500): + Nat IPs are allocated by GCP; customers can + not specify any Nat IPs. + MANUAL_ONLY (261251205): + Only use Nat IPs provided by customers. When + specified Nat IPs are not enough then the Nat + service fails for new VMs. + """ + UNDEFINED_NAT_IP_ALLOCATE_OPTION = 0 + AUTO_ONLY = 182333500 + MANUAL_ONLY = 261251205 + + class SourceSubnetworkIpRangesToNat(proto.Enum): + r"""Specify the Nat option, which can take one of the following values: + - ALL_SUBNETWORKS_ALL_IP_RANGES: All of the IP ranges in every + Subnetwork are allowed to Nat. - + ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES: All of the primary IP ranges + in every Subnetwork are allowed to Nat. - LIST_OF_SUBNETWORKS: A + list of Subnetworks are allowed to Nat (specified in the field + subnetwork below) The default is + SUBNETWORK_IP_RANGE_TO_NAT_OPTION_UNSPECIFIED. 
Note that if this + field contains ALL_SUBNETWORKS_ALL_IP_RANGES then there should not + be any other Router.Nat section in any Router for this network in + this region. + + Values: + UNDEFINED_SOURCE_SUBNETWORK_IP_RANGES_TO_NAT (0): + A value indicating that the enum field is not + set. + ALL_SUBNETWORKS_ALL_IP_RANGES (179964376): + All the IP ranges in every Subnetwork are + allowed to Nat. + ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES (185573819): + All the primary IP ranges in every Subnetwork + are allowed to Nat. + LIST_OF_SUBNETWORKS (517542270): + A list of Subnetworks are allowed to Nat + (specified in the field subnetwork below) + """ + UNDEFINED_SOURCE_SUBNETWORK_IP_RANGES_TO_NAT = 0 + ALL_SUBNETWORKS_ALL_IP_RANGES = 179964376 + ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES = 185573819 + LIST_OF_SUBNETWORKS = 517542270 + + auto_network_tier: str = proto.Field( + proto.STRING, + number=269770211, + optional=True, + ) + drain_nat_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=504078535, + ) + enable_dynamic_port_allocation: bool = proto.Field( + proto.BOOL, + number=532106402, + optional=True, + ) + enable_endpoint_independent_mapping: bool = proto.Field( + proto.BOOL, + number=259441819, + optional=True, + ) + endpoint_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=502633807, + ) + icmp_idle_timeout_sec: int = proto.Field( + proto.INT32, + number=3647562, + optional=True, + ) + log_config: 'RouterNatLogConfig' = proto.Field( + proto.MESSAGE, + number=351299741, + optional=True, + message='RouterNatLogConfig', + ) + max_ports_per_vm: int = proto.Field( + proto.INT32, + number=250062049, + optional=True, + ) + min_ports_per_vm: int = proto.Field( + proto.INT32, + number=186193587, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + nat_ip_allocate_option: str = proto.Field( + proto.STRING, + number=429726845, + optional=True, + ) + nat_ips: MutableSequence[str] = 
proto.RepeatedField( + proto.STRING, + number=117635086, + ) + rules: MutableSequence['RouterNatRule'] = proto.RepeatedField( + proto.MESSAGE, + number=108873975, + message='RouterNatRule', + ) + source_subnetwork_ip_ranges_to_nat: str = proto.Field( + proto.STRING, + number=252213211, + optional=True, + ) + subnetworks: MutableSequence['RouterNatSubnetworkToNat'] = proto.RepeatedField( + proto.MESSAGE, + number=415853125, + message='RouterNatSubnetworkToNat', + ) + tcp_established_idle_timeout_sec: int = proto.Field( + proto.INT32, + number=223098349, + optional=True, + ) + tcp_time_wait_timeout_sec: int = proto.Field( + proto.INT32, + number=513596925, + optional=True, + ) + tcp_transitory_idle_timeout_sec: int = proto.Field( + proto.INT32, + number=205028774, + optional=True, + ) + udp_idle_timeout_sec: int = proto.Field( + proto.INT32, + number=64919878, + optional=True, + ) + + +class RouterNatLogConfig(proto.Message): + r"""Configuration of logging on a NAT. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable (bool): + Indicates whether or not to export logs. This + is false by default. + + This field is a member of `oneof`_ ``_enable``. + filter (str): + Specify the desired filtering of logs on this NAT. If + unspecified, logs are exported for all connections handled + by this NAT. This option can take one of the following + values: - ERRORS_ONLY: Export logs only for connection + failures. - TRANSLATIONS_ONLY: Export logs only for + successful connections. - ALL: Export logs for all + connections, successful and unsuccessful. Check the Filter + enum for the list of possible values. + + This field is a member of `oneof`_ ``_filter``. + """ + class Filter(proto.Enum): + r"""Specify the desired filtering of logs on this NAT. If unspecified, + logs are exported for all connections handled by this NAT. 
This + option can take one of the following values: - ERRORS_ONLY: Export + logs only for connection failures. - TRANSLATIONS_ONLY: Export logs + only for successful connections. - ALL: Export logs for all + connections, successful and unsuccessful. + + Values: + UNDEFINED_FILTER (0): + A value indicating that the enum field is not + set. + ALL (64897): + Export logs for all (successful and + unsuccessful) connections. + ERRORS_ONLY (307484672): + Export logs for connection failures only. + TRANSLATIONS_ONLY (357212649): + Export logs for successful connections only. + """ + UNDEFINED_FILTER = 0 + ALL = 64897 + ERRORS_ONLY = 307484672 + TRANSLATIONS_ONLY = 357212649 + + enable: bool = proto.Field( + proto.BOOL, + number=311764355, + optional=True, + ) + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + + +class RouterNatRule(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + action (google.cloud.compute_v1.types.RouterNatRuleAction): + The action to be enforced for traffic that + matches this rule. + + This field is a member of `oneof`_ ``_action``. + description (str): + An optional description of this rule. + + This field is a member of `oneof`_ ``_description``. + match (str): + CEL expression that specifies the match condition that + egress traffic from a VM is evaluated against. If it + evaluates to true, the corresponding ``action`` is enforced. + The following examples are valid match expressions for + public NAT: "inIpRange(destination.ip, '1.1.0.0/16') \|\| + inIpRange(destination.ip, '2.2.0.0/16')" "destination.ip == + '1.1.0.1' \|\| destination.ip == '8.8.8.8'" The following + example is a valid match expression for private NAT: + "nexthop.hub == + 'https://networkconnectivity.googleapis.com/v1alpha1/projects/my-project/global/hub/hub-1'". + + This field is a member of `oneof`_ ``_match``. 
+ rule_number (int): + An integer uniquely identifying a rule in the + list. The rule number must be a positive value + between 0 and 65000, and must be unique among + rules within a NAT. + + This field is a member of `oneof`_ ``_rule_number``. + """ + + action: 'RouterNatRuleAction' = proto.Field( + proto.MESSAGE, + number=187661878, + optional=True, + message='RouterNatRuleAction', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + match: str = proto.Field( + proto.STRING, + number=103668165, + optional=True, + ) + rule_number: int = proto.Field( + proto.UINT32, + number=535211500, + optional=True, + ) + + +class RouterNatRuleAction(proto.Message): + r""" + + Attributes: + source_nat_active_ips (MutableSequence[str]): + A list of URLs of the IP resources used for + this NAT rule. These IP addresses must be valid + static external IP addresses assigned to the + project. This field is used for public NAT. + source_nat_drain_ips (MutableSequence[str]): + A list of URLs of the IP resources to be + drained. These IPs must be valid static external + IPs that have been assigned to the NAT. These + IPs should be used for updating/patching a NAT + rule only. This field is used for public NAT. + """ + + source_nat_active_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=210378229, + ) + source_nat_drain_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=340812451, + ) + + +class RouterNatSubnetworkToNat(proto.Message): + r"""Defines the IP ranges that want to use NAT for a subnetwork. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + URL for the subnetwork resource that will use + NAT. + + This field is a member of `oneof`_ ``_name``. + secondary_ip_range_names (MutableSequence[str]): + A list of the secondary ranges of the Subnetwork that are + allowed to use NAT. 
This can be populated only if + "LIST_OF_SECONDARY_IP_RANGES" is one of the values in + source_ip_ranges_to_nat. + source_ip_ranges_to_nat (MutableSequence[str]): + Specify the options for NAT ranges in the Subnetwork. All + options of a single value are valid except + NAT_IP_RANGE_OPTION_UNSPECIFIED. The only valid option with + multiple values is: ["PRIMARY_IP_RANGE", + "LIST_OF_SECONDARY_IP_RANGES"] Default: [ALL_IP_RANGES] + Check the SourceIpRangesToNat enum for the list of possible + values. + """ + class SourceIpRangesToNat(proto.Enum): + r""" + + Values: + UNDEFINED_SOURCE_IP_RANGES_TO_NAT (0): + A value indicating that the enum field is not + set. + ALL_IP_RANGES (35608496): + The primary and all the secondary ranges are + allowed to Nat. + LIST_OF_SECONDARY_IP_RANGES (192289308): + A list of secondary ranges are allowed to + Nat. + PRIMARY_IP_RANGE (297109954): + The primary range is allowed to Nat. + """ + UNDEFINED_SOURCE_IP_RANGES_TO_NAT = 0 + ALL_IP_RANGES = 35608496 + LIST_OF_SECONDARY_IP_RANGES = 192289308 + PRIMARY_IP_RANGE = 297109954 + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + secondary_ip_range_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=264315097, + ) + source_ip_ranges_to_nat: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=388310386, + ) + + +class RouterStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + best_routes (MutableSequence[google.cloud.compute_v1.types.Route]): + Best routes for this router's network. + best_routes_for_router (MutableSequence[google.cloud.compute_v1.types.Route]): + Best routes learned by this router. 
+ bgp_peer_status (MutableSequence[google.cloud.compute_v1.types.RouterStatusBgpPeerStatus]): + + nat_status (MutableSequence[google.cloud.compute_v1.types.RouterStatusNatStatus]): + + network (str): + URI of the network to which this router + belongs. + + This field is a member of `oneof`_ ``_network``. + """ + + best_routes: MutableSequence['Route'] = proto.RepeatedField( + proto.MESSAGE, + number=395826693, + message='Route', + ) + best_routes_for_router: MutableSequence['Route'] = proto.RepeatedField( + proto.MESSAGE, + number=119389689, + message='Route', + ) + bgp_peer_status: MutableSequence['RouterStatusBgpPeerStatus'] = proto.RepeatedField( + proto.MESSAGE, + number=218459131, + message='RouterStatusBgpPeerStatus', + ) + nat_status: MutableSequence['RouterStatusNatStatus'] = proto.RepeatedField( + proto.MESSAGE, + number=63098064, + message='RouterStatusNatStatus', + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + + +class RouterStatusBgpPeerStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + advertised_routes (MutableSequence[google.cloud.compute_v1.types.Route]): + Routes that were advertised to the remote BGP + peer + bfd_status (google.cloud.compute_v1.types.BfdStatus): + + This field is a member of `oneof`_ ``_bfd_status``. + enable_ipv6 (bool): + Enable IPv6 traffic over BGP Peer. If not + specified, it is disabled by default. + + This field is a member of `oneof`_ ``_enable_ipv6``. + ip_address (str): + IP address of the local BGP interface. + + This field is a member of `oneof`_ ``_ip_address``. + ipv6_nexthop_address (str): + IPv6 address of the local BGP interface. + + This field is a member of `oneof`_ ``_ipv6_nexthop_address``. + linked_vpn_tunnel (str): + URL of the VPN tunnel that this BGP peer + controls. + + This field is a member of `oneof`_ ``_linked_vpn_tunnel``. 
+ md5_auth_enabled (bool): + Informs whether MD5 authentication is enabled + on this BGP peer. + + This field is a member of `oneof`_ ``_md5_auth_enabled``. + name (str): + Name of this BGP peer. Unique within the + Routers resource. + + This field is a member of `oneof`_ ``_name``. + num_learned_routes (int): + Number of routes learned from the remote BGP + Peer. + + This field is a member of `oneof`_ ``_num_learned_routes``. + peer_ip_address (str): + IP address of the remote BGP interface. + + This field is a member of `oneof`_ ``_peer_ip_address``. + peer_ipv6_nexthop_address (str): + IPv6 address of the remote BGP interface. + + This field is a member of `oneof`_ ``_peer_ipv6_nexthop_address``. + router_appliance_instance (str): + [Output only] URI of the VM instance that is used as + third-party router appliances such as Next Gen Firewalls, + Virtual Routers, or Router Appliances. The VM instance is + the peer side of the BGP session. + + This field is a member of `oneof`_ ``_router_appliance_instance``. + state (str): + The state of the BGP session. For a list of + possible values for this field, see BGP session + states. + + This field is a member of `oneof`_ ``_state``. + status (str): + Status of the BGP peer: {UP, DOWN} + Check the Status enum for the list of possible + values. + + This field is a member of `oneof`_ ``_status``. + status_reason (str): + Indicates why particular status was returned. + Check the StatusReason enum for the list of + possible values. + + This field is a member of `oneof`_ ``_status_reason``. + uptime (str): + Time this session has been up. Format: 14 + years, 51 weeks, 6 days, 23 hours, 59 minutes, + 59 seconds + + This field is a member of `oneof`_ ``_uptime``. + uptime_seconds (str): + Time this session has been up, in seconds. + Format: 145 + + This field is a member of `oneof`_ ``_uptime_seconds``. 
+ """ + class Status(proto.Enum): + r"""Status of the BGP peer: {UP, DOWN} + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + DOWN (2104482): + No description available. + UNKNOWN (433141802): + No description available. + UP (2715): + No description available. + """ + UNDEFINED_STATUS = 0 + DOWN = 2104482 + UNKNOWN = 433141802 + UP = 2715 + + class StatusReason(proto.Enum): + r"""Indicates why particular status was returned. + + Values: + UNDEFINED_STATUS_REASON (0): + A value indicating that the enum field is not + set. + MD5_AUTH_INTERNAL_PROBLEM (140462259): + Indicates internal problems with + configuration of MD5 authentication. This + particular reason can only be returned when + md5AuthEnabled is true and status is DOWN. + STATUS_REASON_UNSPECIFIED (394331913): + No description available. + """ + UNDEFINED_STATUS_REASON = 0 + MD5_AUTH_INTERNAL_PROBLEM = 140462259 + STATUS_REASON_UNSPECIFIED = 394331913 + + advertised_routes: MutableSequence['Route'] = proto.RepeatedField( + proto.MESSAGE, + number=333393068, + message='Route', + ) + bfd_status: 'BfdStatus' = proto.Field( + proto.MESSAGE, + number=395631729, + optional=True, + message='BfdStatus', + ) + enable_ipv6: bool = proto.Field( + proto.BOOL, + number=181467939, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=406272220, + optional=True, + ) + ipv6_nexthop_address: str = proto.Field( + proto.STRING, + number=27968211, + optional=True, + ) + linked_vpn_tunnel: str = proto.Field( + proto.STRING, + number=352296953, + optional=True, + ) + md5_auth_enabled: bool = proto.Field( + proto.BOOL, + number=451152075, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + num_learned_routes: int = proto.Field( + proto.UINT32, + number=135457535, + optional=True, + ) + peer_ip_address: str = proto.Field( + proto.STRING, + number=207735769, + optional=True, + ) + peer_ipv6_nexthop_address: str = 
proto.Field( + proto.STRING, + number=491486608, + optional=True, + ) + router_appliance_instance: str = proto.Field( + proto.STRING, + number=468312989, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + status_reason: str = proto.Field( + proto.STRING, + number=342706993, + optional=True, + ) + uptime: str = proto.Field( + proto.STRING, + number=235379688, + optional=True, + ) + uptime_seconds: str = proto.Field( + proto.STRING, + number=104736040, + optional=True, + ) + + +class RouterStatusNatStatus(proto.Message): + r"""Status of a NAT contained in this router. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_allocated_nat_ips (MutableSequence[str]): + A list of IPs auto-allocated for NAT. Example: ["1.1.1.1", + "129.2.16.89"] + drain_auto_allocated_nat_ips (MutableSequence[str]): + A list of IPs auto-allocated for NAT that are in drain mode. + Example: ["1.1.1.1", "179.12.26.133"]. + drain_user_allocated_nat_ips (MutableSequence[str]): + A list of IPs user-allocated for NAT that are in drain mode. + Example: ["1.1.1.1", "179.12.26.133"]. + min_extra_nat_ips_needed (int): + The number of extra IPs to allocate. This will be greater + than 0 only if user-specified IPs are NOT enough to allow + all configured VMs to use NAT. This value is meaningful only + when auto-allocation of NAT IPs is *not* used. + + This field is a member of `oneof`_ ``_min_extra_nat_ips_needed``. + name (str): + Unique name of this NAT. + + This field is a member of `oneof`_ ``_name``. + num_vm_endpoints_with_nat_mappings (int): + Number of VM endpoints (i.e., Nics) that can + use NAT. + + This field is a member of `oneof`_ ``_num_vm_endpoints_with_nat_mappings``. 
+ rule_status (MutableSequence[google.cloud.compute_v1.types.RouterStatusNatStatusNatRuleStatus]): + Status of rules in this NAT. + user_allocated_nat_ip_resources (MutableSequence[str]): + A list of fully qualified URLs of reserved IP + address resources. + user_allocated_nat_ips (MutableSequence[str]): + A list of IPs user-allocated for NAT. They + will be raw IP strings like "179.12.26.133". + """ + + auto_allocated_nat_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=510794246, + ) + drain_auto_allocated_nat_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=309184557, + ) + drain_user_allocated_nat_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=305268553, + ) + min_extra_nat_ips_needed: int = proto.Field( + proto.INT32, + number=365786338, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + num_vm_endpoints_with_nat_mappings: int = proto.Field( + proto.INT32, + number=512367468, + optional=True, + ) + rule_status: MutableSequence['RouterStatusNatStatusNatRuleStatus'] = proto.RepeatedField( + proto.MESSAGE, + number=140223125, + message='RouterStatusNatStatusNatRuleStatus', + ) + user_allocated_nat_ip_resources: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=212776151, + ) + user_allocated_nat_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=506878242, + ) + + +class RouterStatusNatStatusNatRuleStatus(proto.Message): + r"""Status of a NAT Rule contained in this NAT. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + active_nat_ips (MutableSequence[str]): + A list of active IPs for NAT. Example: ["1.1.1.1", + "179.12.26.133"]. + drain_nat_ips (MutableSequence[str]): + A list of IPs for NAT that are in drain mode. Example: + ["1.1.1.1", "179.12.26.133"]. 
+ min_extra_ips_needed (int): + The number of extra IPs to allocate. This + will be greater than 0 only if the existing IPs + in this NAT Rule are NOT enough to allow all + configured VMs to use NAT. + + This field is a member of `oneof`_ ``_min_extra_ips_needed``. + num_vm_endpoints_with_nat_mappings (int): + Number of VM endpoints (i.e., NICs) that have + NAT Mappings from this NAT Rule. + + This field is a member of `oneof`_ ``_num_vm_endpoints_with_nat_mappings``. + rule_number (int): + Rule number of the rule. + + This field is a member of `oneof`_ ``_rule_number``. + """ + + active_nat_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=208517077, + ) + drain_nat_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=504078535, + ) + min_extra_ips_needed: int = proto.Field( + proto.INT32, + number=353002756, + optional=True, + ) + num_vm_endpoints_with_nat_mappings: int = proto.Field( + proto.INT32, + number=512367468, + optional=True, + ) + rule_number: int = proto.Field( + proto.INT32, + number=535211500, + optional=True, + ) + + +class RouterStatusResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + result (google.cloud.compute_v1.types.RouterStatus): + + This field is a member of `oneof`_ ``_result``. + """ + + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + result: 'RouterStatus' = proto.Field( + proto.MESSAGE, + number=139315229, + optional=True, + message='RouterStatus', + ) + + +class RoutersPreviewResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + resource (google.cloud.compute_v1.types.Router): + Preview of given router. + + This field is a member of `oneof`_ ``_resource``. 
+ """ + + resource: 'Router' = proto.Field( + proto.MESSAGE, + number=195806222, + optional=True, + message='Router', + ) + + +class RoutersScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + routers (MutableSequence[google.cloud.compute_v1.types.Router]): + A list of routers contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of routers when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + routers: MutableSequence['Router'] = proto.RepeatedField( + proto.MESSAGE, + number=311906890, + message='Router', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class Rule(proto.Message): + r"""This is deprecated and has no effect. Do not use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + action (str): + This is deprecated and has no effect. Do not + use. Check the Action enum for the list of + possible values. + + This field is a member of `oneof`_ ``_action``. + conditions (MutableSequence[google.cloud.compute_v1.types.Condition]): + This is deprecated and has no effect. Do not + use. + description (str): + This is deprecated and has no effect. Do not + use. + + This field is a member of `oneof`_ ``_description``. + ins (MutableSequence[str]): + This is deprecated and has no effect. Do not + use. + log_configs (MutableSequence[google.cloud.compute_v1.types.LogConfig]): + This is deprecated and has no effect. Do not + use. + not_ins (MutableSequence[str]): + This is deprecated and has no effect. Do not + use. + permissions (MutableSequence[str]): + This is deprecated and has no effect. Do not + use. + """ + class Action(proto.Enum): + r"""This is deprecated and has no effect. Do not use. 
+ + Values: + UNDEFINED_ACTION (0): + A value indicating that the enum field is not + set. + ALLOW (62368553): + This is deprecated and has no effect. Do not + use. + ALLOW_WITH_LOG (76034177): + This is deprecated and has no effect. Do not + use. + DENY (2094604): + This is deprecated and has no effect. Do not + use. + DENY_WITH_LOG (351433982): + This is deprecated and has no effect. Do not + use. + LOG (75556): + This is deprecated and has no effect. Do not + use. + NO_ACTION (260643444): + This is deprecated and has no effect. Do not + use. + """ + UNDEFINED_ACTION = 0 + ALLOW = 62368553 + ALLOW_WITH_LOG = 76034177 + DENY = 2094604 + DENY_WITH_LOG = 351433982 + LOG = 75556 + NO_ACTION = 260643444 + + action: str = proto.Field( + proto.STRING, + number=187661878, + optional=True, + ) + conditions: MutableSequence['Condition'] = proto.RepeatedField( + proto.MESSAGE, + number=142882488, + message='Condition', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + ins: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=104430, + ) + log_configs: MutableSequence['LogConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=152873846, + message='LogConfig', + ) + not_ins: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=518443138, + ) + permissions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=59962500, + ) + + +class SSLHealthCheck(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + port (int): + The TCP port number to which the health check + prober sends packets. The default value is 443. + Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. + port_name (str): + Not supported. + + This field is a member of `oneof`_ ``_port_name``. + port_specification (str): + Specifies how a port is selected for health checking. 
Can be + one of the following values: USE_FIXED_PORT: Specifies a + port number explicitly using the port field in the health + check. Supported by backend services for pass-through load + balancers and backend services for proxy load balancers. Not + supported by target pools. The health check supports all + backends supported by the backend service provided the + backend can be health checked. For example, GCE_VM_IP + network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not + supported. USE_SERVING_PORT: Provides an indirect method of + specifying the health check port by referring to the backend + service. Only supported by backend services for proxy load + balancers. Not supported by target pools. Not supported by + backend services for pass-through load balancers. Supports + all backends that can be health checked; for example, + GCE_VM_IP_PORT network endpoint groups and instance group + backends. For GCE_VM_IP_PORT network endpoint group + backends, the health check uses the port number specified + for each endpoint in the network endpoint group. For + instance group backends, the health check uses the port + number determined by looking up the backend service's named + port in the instance group's list of named ports. Check the + PortSpecification enum for the list of possible values. + + This field is a member of `oneof`_ ``_port_specification``. + proxy_header (str): + Specifies the type of proxy header to append before sending + data to the backend, either NONE or PROXY_V1. The default is + NONE. Check the ProxyHeader enum for the list of possible + values. + + This field is a member of `oneof`_ ``_proxy_header``. + request (str): + Instructs the health check prober to send + this exact ASCII string, up to 1024 bytes in + length, after establishing the TCP connection + and SSL handshake. + + This field is a member of `oneof`_ ``_request``. 
+ response (str): + Creates a content-based SSL health check. In + addition to establishing a TCP connection and + the TLS handshake, you can configure the health + check to pass only when the backend sends this + exact response ASCII string, up to 1024 bytes in + length. For details, see: + https://cloud.google.com/load-balancing/docs/health-check-concepts#criteria-protocol-ssl-tcp + + This field is a member of `oneof`_ ``_response``. + """ + class PortSpecification(proto.Enum): + r"""Specifies how a port is selected for health checking. Can be one of + the following values: USE_FIXED_PORT: Specifies a port number + explicitly using the port field in the health check. Supported by + backend services for pass-through load balancers and backend + services for proxy load balancers. Not supported by target pools. + The health check supports all backends supported by the backend + service provided the backend can be health checked. For example, + GCE_VM_IP network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not supported. + USE_SERVING_PORT: Provides an indirect method of specifying the + health check port by referring to the backend service. Only + supported by backend services for proxy load balancers. Not + supported by target pools. Not supported by backend services for + pass-through load balancers. Supports all backends that can be + health checked; for example, GCE_VM_IP_PORT network endpoint groups + and instance group backends. For GCE_VM_IP_PORT network endpoint + group backends, the health check uses the port number specified for + each endpoint in the network endpoint group. For instance group + backends, the health check uses the port number determined by + looking up the backend service's named port in the instance group's + list of named ports. + + Values: + UNDEFINED_PORT_SPECIFICATION (0): + A value indicating that the enum field is not + set. 
+ USE_FIXED_PORT (190235748): + The port number in the health check's port is + used for health checking. Applies to network + endpoint group and instance group backends. + USE_NAMED_PORT (349300671): + Not supported. + USE_SERVING_PORT (362637516): + For network endpoint group backends, the + health check uses the port number specified on + each endpoint in the network endpoint group. For + instance group backends, the health check uses + the port number specified for the backend + service's named port defined in the instance + group's named ports. + """ + UNDEFINED_PORT_SPECIFICATION = 0 + USE_FIXED_PORT = 190235748 + USE_NAMED_PORT = 349300671 + USE_SERVING_PORT = 362637516 + + class ProxyHeader(proto.Enum): + r"""Specifies the type of proxy header to append before sending data to + the backend, either NONE or PROXY_V1. The default is NONE. + + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. + """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + port_name: str = proto.Field( + proto.STRING, + number=41534345, + optional=True, + ) + port_specification: str = proto.Field( + proto.STRING, + number=51590597, + optional=True, + ) + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + request: str = proto.Field( + proto.STRING, + number=21951119, + optional=True, + ) + response: str = proto.Field( + proto.STRING, + number=196547649, + optional=True, + ) + + +class SavedAttachedDisk(proto.Message): + r"""DEPRECATED: Please use compute#savedDisk instead. An + instance-attached disk resource. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_delete (bool): + Specifies whether the disk will be + auto-deleted when the instance is deleted (but + not when the disk is detached from the + instance). + + This field is a member of `oneof`_ ``_auto_delete``. + boot (bool): + Indicates that this is a boot disk. The + virtual machine will use the first partition of + the disk for its root filesystem. + + This field is a member of `oneof`_ ``_boot``. + device_name (str): + Specifies the name of the disk attached to + the source instance. + + This field is a member of `oneof`_ ``_device_name``. + disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The encryption key for the disk. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + disk_size_gb (int): + The size of the disk in base-2 GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. + disk_type (str): + [Output Only] URL of the disk type resource. For example: + projects/project /zones/zone/diskTypes/pd-standard or pd-ssd + + This field is a member of `oneof`_ ``_disk_type``. + guest_os_features (MutableSequence[google.cloud.compute_v1.types.GuestOsFeature]): + A list of features to enable on the guest + operating system. Applicable only for bootable + images. Read Enabling guest operating system + features to see a list of available options. + index (int): + Specifies zero-based index of the disk that + is attached to the source instance. + + This field is a member of `oneof`_ ``_index``. + interface (str): + Specifies the disk interface to use for + attaching this disk, which is either SCSI or + NVME. Check the Interface enum for the list of + possible values. + + This field is a member of `oneof`_ ``_interface``. + kind (str): + [Output Only] Type of the resource. Always + compute#attachedDisk for attached disks. + + This field is a member of `oneof`_ ``_kind``. 
+ licenses (MutableSequence[str]): + [Output Only] Any valid publicly visible licenses. + mode (str): + The mode in which this disk is attached to the source + instance, either READ_WRITE or READ_ONLY. Check the Mode + enum for the list of possible values. + + This field is a member of `oneof`_ ``_mode``. + source (str): + Specifies a URL of the disk attached to the + source instance. + + This field is a member of `oneof`_ ``_source``. + storage_bytes (int): + [Output Only] A size of the storage used by the disk's + snapshot by this machine image. + + This field is a member of `oneof`_ ``_storage_bytes``. + storage_bytes_status (str): + [Output Only] An indicator whether storageBytes is in a + stable state or it is being adjusted as a result of shared + storage reallocation. This status can either be UPDATING, + meaning the size of the snapshot is being updated, or + UP_TO_DATE, meaning the size of the snapshot is up-to-date. + Check the StorageBytesStatus enum for the list of possible + values. + + This field is a member of `oneof`_ ``_storage_bytes_status``. + type_ (str): + Specifies the type of the attached disk, + either SCRATCH or PERSISTENT. Check the Type + enum for the list of possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Interface(proto.Enum): + r"""Specifies the disk interface to use for attaching this disk, + which is either SCSI or NVME. + + Values: + UNDEFINED_INTERFACE (0): + A value indicating that the enum field is not + set. + NVME (2408800): + No description available. + SCSI (2539686): + No description available. + """ + UNDEFINED_INTERFACE = 0 + NVME = 2408800 + SCSI = 2539686 + + class Mode(proto.Enum): + r"""The mode in which this disk is attached to the source instance, + either READ_WRITE or READ_ONLY. + + Values: + UNDEFINED_MODE (0): + A value indicating that the enum field is not + set. + READ_ONLY (91950261): + Attaches this disk in read-only mode. 
+ Multiple virtual machines can use a disk in + read-only mode at a time. + READ_WRITE (173607894): + *[Default]* Attaches this disk in read-write mode. Only one + virtual machine at a time can be attached to a disk in + read-write mode. + """ + UNDEFINED_MODE = 0 + READ_ONLY = 91950261 + READ_WRITE = 173607894 + + class StorageBytesStatus(proto.Enum): + r"""[Output Only] An indicator whether storageBytes is in a stable state + or it is being adjusted as a result of shared storage reallocation. + This status can either be UPDATING, meaning the size of the snapshot + is being updated, or UP_TO_DATE, meaning the size of the snapshot is + up-to-date. + + Values: + UNDEFINED_STORAGE_BYTES_STATUS (0): + A value indicating that the enum field is not + set. + UPDATING (494614342): + No description available. + UP_TO_DATE (101306702): + No description available. + """ + UNDEFINED_STORAGE_BYTES_STATUS = 0 + UPDATING = 494614342 + UP_TO_DATE = 101306702 + + class Type(proto.Enum): + r"""Specifies the type of the attached disk, either SCRATCH or + PERSISTENT. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + PERSISTENT (460683927): + No description available. + SCRATCH (496778970): + No description available. 
+ """ + UNDEFINED_TYPE = 0 + PERSISTENT = 460683927 + SCRATCH = 496778970 + + auto_delete: bool = proto.Field( + proto.BOOL, + number=464761403, + optional=True, + ) + boot: bool = proto.Field( + proto.BOOL, + number=3029746, + optional=True, + ) + device_name: str = proto.Field( + proto.STRING, + number=67541716, + optional=True, + ) + disk_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=271660677, + optional=True, + message='CustomerEncryptionKey', + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=316263735, + optional=True, + ) + disk_type: str = proto.Field( + proto.STRING, + number=93009052, + optional=True, + ) + guest_os_features: MutableSequence['GuestOsFeature'] = proto.RepeatedField( + proto.MESSAGE, + number=79294545, + message='GuestOsFeature', + ) + index: int = proto.Field( + proto.INT32, + number=100346066, + optional=True, + ) + interface: str = proto.Field( + proto.STRING, + number=502623545, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + licenses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=337642578, + ) + mode: str = proto.Field( + proto.STRING, + number=3357091, + optional=True, + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + storage_bytes: int = proto.Field( + proto.INT64, + number=424631719, + optional=True, + ) + storage_bytes_status: str = proto.Field( + proto.STRING, + number=490739082, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class SavedDisk(proto.Message): + r"""An instance-attached disk resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + architecture (str): + [Output Only] The architecture of the attached disk. Check + the Architecture enum for the list of possible values. 
+ + This field is a member of `oneof`_ ``_architecture``. + kind (str): + [Output Only] Type of the resource. Always compute#savedDisk + for attached disks. + + This field is a member of `oneof`_ ``_kind``. + source_disk (str): + Specifies a URL of the disk attached to the + source instance. + + This field is a member of `oneof`_ ``_source_disk``. + storage_bytes (int): + [Output Only] Size of the individual disk snapshot used by + this machine image. + + This field is a member of `oneof`_ ``_storage_bytes``. + storage_bytes_status (str): + [Output Only] An indicator whether storageBytes is in a + stable state or it is being adjusted as a result of shared + storage reallocation. This status can either be UPDATING, + meaning the size of the snapshot is being updated, or + UP_TO_DATE, meaning the size of the snapshot is up-to-date. + Check the StorageBytesStatus enum for the list of possible + values. + + This field is a member of `oneof`_ ``_storage_bytes_status``. + """ + class Architecture(proto.Enum): + r"""[Output Only] The architecture of the attached disk. + + Values: + UNDEFINED_ARCHITECTURE (0): + A value indicating that the enum field is not + set. + ARCHITECTURE_UNSPECIFIED (394750507): + Default value indicating Architecture is not + set. + ARM64 (62547450): + Machines with architecture ARM64 + X86_64 (425300551): + Machines with architecture X86_64 + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + + class StorageBytesStatus(proto.Enum): + r"""[Output Only] An indicator whether storageBytes is in a stable state + or it is being adjusted as a result of shared storage reallocation. + This status can either be UPDATING, meaning the size of the snapshot + is being updated, or UP_TO_DATE, meaning the size of the snapshot is + up-to-date. + + Values: + UNDEFINED_STORAGE_BYTES_STATUS (0): + A value indicating that the enum field is not + set. + UPDATING (494614342): + No description available. 
+ UP_TO_DATE (101306702): + No description available. + """ + UNDEFINED_STORAGE_BYTES_STATUS = 0 + UPDATING = 494614342 + UP_TO_DATE = 101306702 + + architecture: str = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + source_disk: str = proto.Field( + proto.STRING, + number=451753793, + optional=True, + ) + storage_bytes: int = proto.Field( + proto.INT64, + number=424631719, + optional=True, + ) + storage_bytes_status: str = proto.Field( + proto.STRING, + number=490739082, + optional=True, + ) + + +class ScalingScheduleStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + last_start_time (str): + [Output Only] The last time the scaling schedule became + active. Note: this is a timestamp when a schedule actually + became active, not when it was planned to do so. The + timestamp is in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_start_time``. + next_start_time (str): + [Output Only] The next time the scaling schedule is to + become active. Note: this is a timestamp when a schedule is + planned to run, but the actual time might be slightly + different. The timestamp is in RFC3339 text format. + + This field is a member of `oneof`_ ``_next_start_time``. + state (str): + [Output Only] The current state of a scaling schedule. Check + the State enum for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + """ + class State(proto.Enum): + r"""[Output Only] The current state of a scaling schedule. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + The current autoscaling recommendation is + influenced by this scaling schedule. + DISABLED (516696700): + This scaling schedule has been disabled by + the user. 
+ OBSOLETE (66532761): + This scaling schedule will never become + active again. + READY (77848963): + The current autoscaling recommendation is not + influenced by this scaling schedule. + """ + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + DISABLED = 516696700 + OBSOLETE = 66532761 + READY = 77848963 + + last_start_time: str = proto.Field( + proto.STRING, + number=34545107, + optional=True, + ) + next_start_time: str = proto.Field( + proto.STRING, + number=97270102, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class Scheduling(proto.Message): + r"""Sets the scheduling options for an Instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + automatic_restart (bool): + Specifies whether the instance should be + automatically restarted if it is terminated by + Compute Engine (not terminated by a user). You + can only set the automatic restart option for + standard instances. Preemptible instances cannot + be automatically restarted. By default, this is + set to true so an instance is automatically + restarted if it is terminated by Compute Engine. + + This field is a member of `oneof`_ ``_automatic_restart``. + instance_termination_action (str): + Specifies the termination action for the + instance. Check the InstanceTerminationAction + enum for the list of possible values. + + This field is a member of `oneof`_ ``_instance_termination_action``. + local_ssd_recovery_timeout (google.cloud.compute_v1.types.Duration): + Specifies the maximum amount of time a Local + Ssd Vm should wait while recovery of the Local + Ssd state is attempted. Its value should be in + between 0 and 168 hours with hour granularity + and the default value being 1 hour. + + This field is a member of `oneof`_ ``_local_ssd_recovery_timeout``. + location_hint (str): + An opaque location hint used to place the + instance close to other resources. 
This field is + for use by internal tools that use the public + API. + + This field is a member of `oneof`_ ``_location_hint``. + min_node_cpus (int): + The minimum number of virtual CPUs this + instance will consume when running on a + sole-tenant node. + + This field is a member of `oneof`_ ``_min_node_cpus``. + node_affinities (MutableSequence[google.cloud.compute_v1.types.SchedulingNodeAffinity]): + A set of node affinity and anti-affinity + configurations. Refer to Configuring node + affinity for more information. Overrides + reservationAffinity. + on_host_maintenance (str): + Defines the maintenance behavior for this + instance. For standard instances, the default + behavior is MIGRATE. For preemptible instances, + the default and only possible behavior is + TERMINATE. For more information, see Set VM host + maintenance policy. Check the OnHostMaintenance + enum for the list of possible values. + + This field is a member of `oneof`_ ``_on_host_maintenance``. + preemptible (bool): + Defines whether the instance is preemptible. This can only + be set during instance creation or while the instance is + stopped and therefore, in a ``TERMINATED`` state. See + Instance Life Cycle for more information on the possible + instance states. + + This field is a member of `oneof`_ ``_preemptible``. + provisioning_model (str): + Specifies the provisioning model of the + instance. Check the ProvisioningModel enum for + the list of possible values. + + This field is a member of `oneof`_ ``_provisioning_model``. + """ + class InstanceTerminationAction(proto.Enum): + r"""Specifies the termination action for the instance. + + Values: + UNDEFINED_INSTANCE_TERMINATION_ACTION (0): + A value indicating that the enum field is not + set. + DELETE (402225579): + Delete the VM. + INSTANCE_TERMINATION_ACTION_UNSPECIFIED (92954803): + Default value. This value is unused. + STOP (2555906): + Stop the VM without storing in-memory + content. default action. 
+ """ + UNDEFINED_INSTANCE_TERMINATION_ACTION = 0 + DELETE = 402225579 + INSTANCE_TERMINATION_ACTION_UNSPECIFIED = 92954803 + STOP = 2555906 + + class OnHostMaintenance(proto.Enum): + r"""Defines the maintenance behavior for this instance. For + standard instances, the default behavior is MIGRATE. For + preemptible instances, the default and only possible behavior is + TERMINATE. For more information, see Set VM host maintenance + policy. + + Values: + UNDEFINED_ON_HOST_MAINTENANCE (0): + A value indicating that the enum field is not + set. + MIGRATE (165699979): + *[Default]* Allows Compute Engine to automatically migrate + instances out of the way of maintenance events. + TERMINATE (527617601): + Tells Compute Engine to terminate and + (optionally) restart the instance away from the + maintenance activity. If you would like your + instance to be restarted, set the + automaticRestart flag to true. Your instance may + be restarted more than once, and it may be + restarted outside the window of maintenance + events. + """ + UNDEFINED_ON_HOST_MAINTENANCE = 0 + MIGRATE = 165699979 + TERMINATE = 527617601 + + class ProvisioningModel(proto.Enum): + r"""Specifies the provisioning model of the instance. + + Values: + UNDEFINED_PROVISIONING_MODEL (0): + A value indicating that the enum field is not + set. + SPOT (2552066): + Heavily discounted, no guaranteed runtime. + STANDARD (484642493): + Standard provisioning with user controlled + runtime, no discounts. 
+ """ + UNDEFINED_PROVISIONING_MODEL = 0 + SPOT = 2552066 + STANDARD = 484642493 + + automatic_restart: bool = proto.Field( + proto.BOOL, + number=350821371, + optional=True, + ) + instance_termination_action: str = proto.Field( + proto.STRING, + number=107380667, + optional=True, + ) + local_ssd_recovery_timeout: 'Duration' = proto.Field( + proto.MESSAGE, + number=268015590, + optional=True, + message='Duration', + ) + location_hint: str = proto.Field( + proto.STRING, + number=350519505, + optional=True, + ) + min_node_cpus: int = proto.Field( + proto.INT32, + number=317231675, + optional=True, + ) + node_affinities: MutableSequence['SchedulingNodeAffinity'] = proto.RepeatedField( + proto.MESSAGE, + number=461799971, + message='SchedulingNodeAffinity', + ) + on_host_maintenance: str = proto.Field( + proto.STRING, + number=64616796, + optional=True, + ) + preemptible: bool = proto.Field( + proto.BOOL, + number=324203169, + optional=True, + ) + provisioning_model: str = proto.Field( + proto.STRING, + number=494423, + optional=True, + ) + + +class SchedulingNodeAffinity(proto.Message): + r"""Node Affinity: the configuration of desired nodes onto which + this Instance could be scheduled. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Corresponds to the label key of Node + resource. + + This field is a member of `oneof`_ ``_key``. + operator (str): + Defines the operation of node selection. Valid operators are + IN for affinity and NOT_IN for anti-affinity. Check the + Operator enum for the list of possible values. + + This field is a member of `oneof`_ ``_operator``. + values (MutableSequence[str]): + Corresponds to the label values of Node + resource. + """ + class Operator(proto.Enum): + r"""Defines the operation of node selection. Valid operators are IN for + affinity and NOT_IN for anti-affinity. 
+ + Values: + UNDEFINED_OPERATOR (0): + A value indicating that the enum field is not + set. + IN (2341): + Requires Compute Engine to seek for matched + nodes. + NOT_IN (161144369): + Requires Compute Engine to avoid certain + nodes. + OPERATOR_UNSPECIFIED (128892924): + No description available. + """ + UNDEFINED_OPERATOR = 0 + IN = 2341 + NOT_IN = 161144369 + OPERATOR_UNSPECIFIED = 128892924 + + key: str = proto.Field( + proto.STRING, + number=106079, + optional=True, + ) + operator: str = proto.Field( + proto.STRING, + number=36317348, + optional=True, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=249928994, + ) + + +class ScratchDisks(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_gb (int): + Size of the scratch disk, defined in GB. + + This field is a member of `oneof`_ ``_disk_gb``. + """ + + disk_gb: int = proto.Field( + proto.INT32, + number=60990141, + optional=True, + ) + + +class Screenshot(proto.Message): + r"""An instance's screenshot. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + contents (str): + [Output Only] The Base64-encoded screenshot data. + + This field is a member of `oneof`_ ``_contents``. + kind (str): + [Output Only] Type of the resource. Always + compute#screenshot for the screenshots. + + This field is a member of `oneof`_ ``_kind``. + """ + + contents: str = proto.Field( + proto.STRING, + number=506419994, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + + +class SecurityPoliciesAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. 
+ id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.SecurityPoliciesScopedList]): + A list of SecurityPoliciesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#securityPolicyAggregatedList for lists of Security + Policies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'SecurityPoliciesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='SecurityPoliciesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SecurityPoliciesListPreconfiguredExpressionSetsResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + preconfigured_expression_sets (google.cloud.compute_v1.types.SecurityPoliciesWafConfig): + + This field is a member of `oneof`_ ``_preconfigured_expression_sets``. + """ + + preconfigured_expression_sets: 'SecurityPoliciesWafConfig' = proto.Field( + proto.MESSAGE, + number=536200826, + optional=True, + message='SecurityPoliciesWafConfig', + ) + + +class SecurityPoliciesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + security_policies (MutableSequence[google.cloud.compute_v1.types.SecurityPolicy]): + A list of SecurityPolicies contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of security policies when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + security_policies: MutableSequence['SecurityPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=127783791, + message='SecurityPolicy', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SecurityPoliciesWafConfig(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + waf_rules (google.cloud.compute_v1.types.PreconfiguredWafSet): + + This field is a member of `oneof`_ ``_waf_rules``. + """ + + waf_rules: 'PreconfiguredWafSet' = proto.Field( + proto.MESSAGE, + number=74899924, + optional=True, + message='PreconfiguredWafSet', + ) + + +class SecurityPolicy(proto.Message): + r"""Represents a Google Cloud Armor security policy resource. + Only external backend services that use load balancers can + reference a security policy. For more information, see Google + Cloud Armor security policy overview. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + adaptive_protection_config (google.cloud.compute_v1.types.SecurityPolicyAdaptiveProtectionConfig): + + This field is a member of `oneof`_ ``_adaptive_protection_config``. + advanced_options_config (google.cloud.compute_v1.types.SecurityPolicyAdvancedOptionsConfig): + + This field is a member of `oneof`_ ``_advanced_options_config``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + ddos_protection_config (google.cloud.compute_v1.types.SecurityPolicyDdosProtectionConfig): + + This field is a member of `oneof`_ ``_ddos_protection_config``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. 
+ fingerprint (str): + Specifies a fingerprint for this resource, + which is essentially a hash of the metadata's + contents and used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update metadata. You must always provide an + up-to-date fingerprint hash in order to update + or change metadata, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make get() request to the + security policy. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output only] Type of the resource. Always + compute#securityPolicyfor security policies + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this security policy, which is essentially a + hash of the labels set used for optimistic + locking. The fingerprint is initially generated + by Compute Engine and changes after every + request to modify or update labels. You must + always provide an up-to-date fingerprint hash in + order to update or change labels. To see the + latest fingerprint, make get() request to the + security policy. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + recaptcha_options_config (google.cloud.compute_v1.types.SecurityPolicyRecaptchaOptionsConfig): + + This field is a member of `oneof`_ ``_recaptcha_options_config``. + region (str): + [Output Only] URL of the region where the regional security + policy resides. This field is not applicable to global + security policies. + + This field is a member of `oneof`_ ``_region``. + rules (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRule]): + A list of rules that belong to this policy. There must + always be a default rule which is a rule with priority + 2147483647 and match all condition (for the match condition + this means match "*" for srcIpRanges and for the + networkMatch condition every field must be either match "*" + or not set). If no rules are provided when creating a + security policy, a default rule with action "allow" will be + added. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + type_ (str): + The type indicates the intended use of the security policy. + - CLOUD_ARMOR: Cloud Armor backend security policies can be + configured to filter incoming HTTP requests targeting + backend services. They filter requests before they hit the + origin servers. - CLOUD_ARMOR_EDGE: Cloud Armor edge + security policies can be configured to filter incoming HTTP + requests targeting backend services (including Cloud + CDN-enabled) as well as backend buckets (Cloud Storage). + They filter requests before the request is served from + Google's cache. 
- CLOUD_ARMOR_INTERNAL_SERVICE: Cloud Armor + internal service policies can be configured to filter HTTP + requests targeting services managed by Traffic Director in a + service mesh. They filter requests before the request is + served from the application. - CLOUD_ARMOR_NETWORK: Cloud + Armor network policies can be configured to filter packets + targeting network load balancing resources such as backend + services, target pools, target instances, and instances with + external IPs. They filter requests before the request is + served from the application. This field can be set only at + resource creation time. Check the Type enum for the list of + possible values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""The type indicates the intended use of the security policy. - + CLOUD_ARMOR: Cloud Armor backend security policies can be configured + to filter incoming HTTP requests targeting backend services. They + filter requests before they hit the origin servers. - + CLOUD_ARMOR_EDGE: Cloud Armor edge security policies can be + configured to filter incoming HTTP requests targeting backend + services (including Cloud CDN-enabled) as well as backend buckets + (Cloud Storage). They filter requests before the request is served + from Google's cache. - CLOUD_ARMOR_INTERNAL_SERVICE: Cloud Armor + internal service policies can be configured to filter HTTP requests + targeting services managed by Traffic Director in a service mesh. + They filter requests before the request is served from the + application. - CLOUD_ARMOR_NETWORK: Cloud Armor network policies can + be configured to filter packets targeting network load balancing + resources such as backend services, target pools, target instances, + and instances with external IPs. They filter requests before the + request is served from the application. This field can be set only + at resource creation time. 
+ + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + CLOUD_ARMOR (260640373): + No description available. + CLOUD_ARMOR_EDGE (250728775): + No description available. + CLOUD_ARMOR_NETWORK (488527428): + No description available. + """ + UNDEFINED_TYPE = 0 + CLOUD_ARMOR = 260640373 + CLOUD_ARMOR_EDGE = 250728775 + CLOUD_ARMOR_NETWORK = 488527428 + + adaptive_protection_config: 'SecurityPolicyAdaptiveProtectionConfig' = proto.Field( + proto.MESSAGE, + number=150240735, + optional=True, + message='SecurityPolicyAdaptiveProtectionConfig', + ) + advanced_options_config: 'SecurityPolicyAdvancedOptionsConfig' = proto.Field( + proto.MESSAGE, + number=449276352, + optional=True, + message='SecurityPolicyAdvancedOptionsConfig', + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + ddos_protection_config: 'SecurityPolicyDdosProtectionConfig' = proto.Field( + proto.MESSAGE, + number=50315853, + optional=True, + message='SecurityPolicyDdosProtectionConfig', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + recaptcha_options_config: 'SecurityPolicyRecaptchaOptionsConfig' = proto.Field( + proto.MESSAGE, + number=519006811, + optional=True, + message='SecurityPolicyRecaptchaOptionsConfig', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + rules: 
MutableSequence['SecurityPolicyRule'] = proto.RepeatedField( + proto.MESSAGE, + number=108873975, + message='SecurityPolicyRule', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class SecurityPolicyAdaptiveProtectionConfig(proto.Message): + r"""Configuration options for Cloud Armor Adaptive Protection + (CAAP). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + layer7_ddos_defense_config (google.cloud.compute_v1.types.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig): + If set to true, enables Cloud Armor Machine + Learning. + + This field is a member of `oneof`_ ``_layer7_ddos_defense_config``. + """ + + layer7_ddos_defense_config: 'SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig' = proto.Field( + proto.MESSAGE, + number=437316771, + optional=True, + message='SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig', + ) + + +class SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(proto.Message): + r"""Configuration options for L7 DDoS detection. This field is only + supported in Global Security Policies of type CLOUD_ARMOR. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable (bool): + If set to true, enables CAAP for L7 DDoS detection. This + field is only supported in Global Security Policies of type + CLOUD_ARMOR. + + This field is a member of `oneof`_ ``_enable``. + rule_visibility (str): + Rule visibility can be one of the following: STANDARD - + opaque rules. (default) PREMIUM - transparent rules. This + field is only supported in Global Security Policies of type + CLOUD_ARMOR. Check the RuleVisibility enum for the list of + possible values. + + This field is a member of `oneof`_ ``_rule_visibility``. 
+ """ + class RuleVisibility(proto.Enum): + r"""Rule visibility can be one of the following: STANDARD - opaque + rules. (default) PREMIUM - transparent rules. This field is only + supported in Global Security Policies of type CLOUD_ARMOR. + + Values: + UNDEFINED_RULE_VISIBILITY (0): + A value indicating that the enum field is not + set. + PREMIUM (399530551): + No description available. + STANDARD (484642493): + No description available. + """ + UNDEFINED_RULE_VISIBILITY = 0 + PREMIUM = 399530551 + STANDARD = 484642493 + + enable: bool = proto.Field( + proto.BOOL, + number=311764355, + optional=True, + ) + rule_visibility: str = proto.Field( + proto.STRING, + number=453258293, + optional=True, + ) + + +class SecurityPolicyAdvancedOptionsConfig(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + json_custom_config (google.cloud.compute_v1.types.SecurityPolicyAdvancedOptionsConfigJsonCustomConfig): + Custom configuration to apply the JSON parsing. Only + applicable when json_parsing is set to STANDARD. + + This field is a member of `oneof`_ ``_json_custom_config``. + json_parsing (str): + Check the JsonParsing enum for the list of + possible values. + + This field is a member of `oneof`_ ``_json_parsing``. + log_level (str): + Check the LogLevel enum for the list of + possible values. + + This field is a member of `oneof`_ ``_log_level``. + """ + class JsonParsing(proto.Enum): + r""" + + Values: + UNDEFINED_JSON_PARSING (0): + A value indicating that the enum field is not + set. + DISABLED (516696700): + No description available. + STANDARD (484642493): + No description available. + """ + UNDEFINED_JSON_PARSING = 0 + DISABLED = 516696700 + STANDARD = 484642493 + + class LogLevel(proto.Enum): + r""" + + Values: + UNDEFINED_LOG_LEVEL (0): + A value indicating that the enum field is not + set. + NORMAL (161067239): + No description available. 
+ VERBOSE (532219234): + No description available. + """ + UNDEFINED_LOG_LEVEL = 0 + NORMAL = 161067239 + VERBOSE = 532219234 + + json_custom_config: 'SecurityPolicyAdvancedOptionsConfigJsonCustomConfig' = proto.Field( + proto.MESSAGE, + number=111570105, + optional=True, + message='SecurityPolicyAdvancedOptionsConfigJsonCustomConfig', + ) + json_parsing: str = proto.Field( + proto.STRING, + number=282493529, + optional=True, + ) + log_level: str = proto.Field( + proto.STRING, + number=140582601, + optional=True, + ) + + +class SecurityPolicyAdvancedOptionsConfigJsonCustomConfig(proto.Message): + r""" + + Attributes: + content_types (MutableSequence[str]): + A list of custom Content-Type header values to apply the + JSON parsing. As per RFC 1341, a Content-Type header value + has the following format: Content-Type := type "/" subtype + \*[";" parameter] When configuring a custom Content-Type + header value, only the type/subtype needs to be specified, + and the parameters should be excluded. + """ + + content_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=17428787, + ) + + +class SecurityPolicyDdosProtectionConfig(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ddos_protection (str): + Check the DdosProtection enum for the list of + possible values. + + This field is a member of `oneof`_ ``_ddos_protection``. + """ + class DdosProtection(proto.Enum): + r""" + + Values: + UNDEFINED_DDOS_PROTECTION (0): + A value indicating that the enum field is not + set. + ADVANCED (63789090): + No description available. + STANDARD (484642493): + No description available. + """ + UNDEFINED_DDOS_PROTECTION = 0 + ADVANCED = 63789090 + STANDARD = 484642493 + + ddos_protection: str = proto.Field( + proto.STRING, + number=275173268, + optional=True, + ) + + +class SecurityPolicyList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.SecurityPolicy]): + A list of SecurityPolicy resources. + kind (str): + [Output Only] Type of resource. Always + compute#securityPolicyList for listsof securityPolicies + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['SecurityPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='SecurityPolicy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SecurityPolicyRecaptchaOptionsConfig(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + redirect_site_key (str): + An optional field to supply a reCAPTCHA site key to be used + for all the rules using the redirect action with the type of + GOOGLE_RECAPTCHA under the security policy. The specified + site key needs to be created from the reCAPTCHA API. The + user is responsible for the validity of the specified site + key. If not specified, a Google-managed site key is used. + This field is only supported in Global Security Policies of + type CLOUD_ARMOR. + + This field is a member of `oneof`_ ``_redirect_site_key``. + """ + + redirect_site_key: str = proto.Field( + proto.STRING, + number=447677034, + optional=True, + ) + + +class SecurityPolicyReference(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + security_policy (str): + + This field is a member of `oneof`_ ``_security_policy``. + """ + + security_policy: str = proto.Field( + proto.STRING, + number=171082513, + optional=True, + ) + + +class SecurityPolicyRule(proto.Message): + r"""Represents a rule that describes one or more match conditions + along with the action to be taken when traffic matches this + condition (allow or deny). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + action (str): + The Action to perform when the rule is matched. The + following are the valid actions: - allow: allow access to + target. - deny(STATUS): deny access to target, returns the + HTTP response code specified. Valid values for ``STATUS`` + are 403, 404, and 502. - rate_based_ban: limit client + traffic to the configured threshold and ban the client if + the traffic exceeds the threshold. Configure parameters for + this action in RateLimitOptions. Requires rate_limit_options + to be set. 
- redirect: redirect to a different target. This + can either be an internal reCAPTCHA redirect, or an external + URL-based redirect via a 302 response. Parameters for this + action can be configured via redirectOptions. This action is + only supported in Global Security Policies of type + CLOUD_ARMOR. - throttle: limit client traffic to the + configured threshold. Configure parameters for this action + in rateLimitOptions. Requires rate_limit_options to be set + for this. + + This field is a member of `oneof`_ ``_action``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + header_action (google.cloud.compute_v1.types.SecurityPolicyRuleHttpHeaderAction): + Optional, additional actions that are performed on headers. + This field is only supported in Global Security Policies of + type CLOUD_ARMOR. + + This field is a member of `oneof`_ ``_header_action``. + kind (str): + [Output only] Type of the resource. Always + compute#securityPolicyRule for security policy rules + + This field is a member of `oneof`_ ``_kind``. + match (google.cloud.compute_v1.types.SecurityPolicyRuleMatcher): + A match condition that incoming traffic is + evaluated against. If it evaluates to true, the + corresponding 'action' is enforced. + + This field is a member of `oneof`_ ``_match``. + preconfigured_waf_config (google.cloud.compute_v1.types.SecurityPolicyRulePreconfiguredWafConfig): + Preconfigured WAF configuration to be applied + for the rule. If the rule does not evaluate + preconfigured WAF rules, i.e., if + evaluatePreconfiguredWaf() is not used, this + field will have no effect. + + This field is a member of `oneof`_ ``_preconfigured_waf_config``. + preview (bool): + If set to true, the specified action is not + enforced. + + This field is a member of `oneof`_ ``_preview``. + priority (int): + An integer indicating the priority of a rule + in the list. 
The priority must be a positive + value between 0 and 2147483647. Rules are + evaluated from highest to lowest priority where + 0 is the highest priority and 2147483647 is the + lowest priority. + + This field is a member of `oneof`_ ``_priority``. + rate_limit_options (google.cloud.compute_v1.types.SecurityPolicyRuleRateLimitOptions): + Must be specified if the action is "rate_based_ban" or + "throttle". Cannot be specified for any other actions. + + This field is a member of `oneof`_ ``_rate_limit_options``. + redirect_options (google.cloud.compute_v1.types.SecurityPolicyRuleRedirectOptions): + Parameters defining the redirect action. Cannot be specified + for any other actions. This field is only supported in + Global Security Policies of type CLOUD_ARMOR. + + This field is a member of `oneof`_ ``_redirect_options``. + """ + + action: str = proto.Field( + proto.STRING, + number=187661878, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + header_action: 'SecurityPolicyRuleHttpHeaderAction' = proto.Field( + proto.MESSAGE, + number=328077352, + optional=True, + message='SecurityPolicyRuleHttpHeaderAction', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + match: 'SecurityPolicyRuleMatcher' = proto.Field( + proto.MESSAGE, + number=103668165, + optional=True, + message='SecurityPolicyRuleMatcher', + ) + preconfigured_waf_config: 'SecurityPolicyRulePreconfiguredWafConfig' = proto.Field( + proto.MESSAGE, + number=117805027, + optional=True, + message='SecurityPolicyRulePreconfiguredWafConfig', + ) + preview: bool = proto.Field( + proto.BOOL, + number=218686408, + optional=True, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + rate_limit_options: 'SecurityPolicyRuleRateLimitOptions' = proto.Field( + proto.MESSAGE, + number=67544315, + optional=True, + message='SecurityPolicyRuleRateLimitOptions', + ) + redirect_options: 
'SecurityPolicyRuleRedirectOptions' = proto.Field( + proto.MESSAGE, + number=163285307, + optional=True, + message='SecurityPolicyRuleRedirectOptions', + ) + + +class SecurityPolicyRuleHttpHeaderAction(proto.Message): + r""" + + Attributes: + request_headers_to_adds (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption]): + The list of request headers to add or + overwrite if they're already present. + """ + + request_headers_to_adds: MutableSequence['SecurityPolicyRuleHttpHeaderActionHttpHeaderOption'] = proto.RepeatedField( + proto.MESSAGE, + number=87987661, + message='SecurityPolicyRuleHttpHeaderActionHttpHeaderOption', + ) + + +class SecurityPolicyRuleHttpHeaderActionHttpHeaderOption(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + header_name (str): + The name of the header to set. + + This field is a member of `oneof`_ ``_header_name``. + header_value (str): + The value to set the named header to. + + This field is a member of `oneof`_ ``_header_value``. + """ + + header_name: str = proto.Field( + proto.STRING, + number=110223613, + optional=True, + ) + header_value: str = proto.Field( + proto.STRING, + number=203094335, + optional=True, + ) + + +class SecurityPolicyRuleMatcher(proto.Message): + r"""Represents a match condition that incoming traffic is + evaluated against. Exactly one field must be specified. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + config (google.cloud.compute_v1.types.SecurityPolicyRuleMatcherConfig): + The configuration options available when specifying + versioned_expr. This field must be specified if + versioned_expr is specified and cannot be specified if + versioned_expr is not specified. + + This field is a member of `oneof`_ ``_config``. 
+ expr (google.cloud.compute_v1.types.Expr): + User defined CEVAL expression. A CEVAL expression is used to + specify match criteria such as origin.ip, source.region_code + and contents in the request header. Expressions containing + ``evaluateThreatIntelligence`` require Cloud Armor Managed + Protection Plus tier and are not supported in Edge Policies + nor in Regional Policies. Expressions containing + ``evaluatePreconfiguredExpr('sourceiplist-*')`` require + Cloud Armor Managed Protection Plus tier and are only + supported in Global Security Policies. + + This field is a member of `oneof`_ ``_expr``. + versioned_expr (str): + Preconfigured versioned expression. If this field is + specified, config must also be specified. Available + preconfigured expressions along with their requirements are: + SRC_IPS_V1 - must specify the corresponding src_ip_range + field in config. Check the VersionedExpr enum for the list + of possible values. + + This field is a member of `oneof`_ ``_versioned_expr``. + """ + class VersionedExpr(proto.Enum): + r"""Preconfigured versioned expression. If this field is specified, + config must also be specified. Available preconfigured expressions + along with their requirements are: SRC_IPS_V1 - must specify the + corresponding src_ip_range field in config. + + Values: + UNDEFINED_VERSIONED_EXPR (0): + A value indicating that the enum field is not + set. + SRC_IPS_V1 (70925961): + Matches the source IP address of a request to + the IP ranges supplied in config. 
+ """ + UNDEFINED_VERSIONED_EXPR = 0 + SRC_IPS_V1 = 70925961 + + config: 'SecurityPolicyRuleMatcherConfig' = proto.Field( + proto.MESSAGE, + number=255820610, + optional=True, + message='SecurityPolicyRuleMatcherConfig', + ) + expr: 'Expr' = proto.Field( + proto.MESSAGE, + number=3127797, + optional=True, + message='Expr', + ) + versioned_expr: str = proto.Field( + proto.STRING, + number=322286013, + optional=True, + ) + + +class SecurityPolicyRuleMatcherConfig(proto.Message): + r""" + + Attributes: + src_ip_ranges (MutableSequence[str]): + CIDR IP address range. Maximum number of src_ip_ranges + allowed is 10. + """ + + src_ip_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=432128083, + ) + + +class SecurityPolicyRulePreconfiguredWafConfig(proto.Message): + r""" + + Attributes: + exclusions (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRulePreconfiguredWafConfigExclusion]): + A list of exclusions to apply during + preconfigured WAF evaluation. + """ + + exclusions: MutableSequence['SecurityPolicyRulePreconfiguredWafConfigExclusion'] = proto.RepeatedField( + proto.MESSAGE, + number=208665701, + message='SecurityPolicyRulePreconfiguredWafConfigExclusion', + ) + + +class SecurityPolicyRulePreconfiguredWafConfigExclusion(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + request_cookies_to_exclude (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams]): + A list of request cookie names whose value + will be excluded from inspection during + preconfigured WAF evaluation. + request_headers_to_exclude (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams]): + A list of request header names whose value + will be excluded from inspection during + preconfigured WAF evaluation. 
+ request_query_params_to_exclude (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams]): + A list of request query parameter names whose + value will be excluded from inspection during + preconfigured WAF evaluation. Note that the + parameter can be in the query string or in the + POST body. + request_uris_to_exclude (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams]): + A list of request URIs from the request line + to be excluded from inspection during + preconfigured WAF evaluation. When specifying + this field, the query or fragment part should be + excluded. + target_rule_ids (MutableSequence[str]): + A list of target rule IDs under the WAF rule + set to apply the preconfigured WAF exclusion. If + omitted, it refers to all the rule IDs under the + WAF rule set. + target_rule_set (str): + Target WAF rule set to apply the + preconfigured WAF exclusion. + + This field is a member of `oneof`_ ``_target_rule_set``. 
+ """ + + request_cookies_to_exclude: MutableSequence['SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams'] = proto.RepeatedField( + proto.MESSAGE, + number=156757878, + message='SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams', + ) + request_headers_to_exclude: MutableSequence['SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams'] = proto.RepeatedField( + proto.MESSAGE, + number=63230495, + message='SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams', + ) + request_query_params_to_exclude: MutableSequence['SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams'] = proto.RepeatedField( + proto.MESSAGE, + number=340692744, + message='SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams', + ) + request_uris_to_exclude: MutableSequence['SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams'] = proto.RepeatedField( + proto.MESSAGE, + number=90690846, + message='SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams', + ) + target_rule_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=498430435, + ) + target_rule_set: str = proto.Field( + proto.STRING, + number=498440077, + optional=True, + ) + + +class SecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + op (str): + The match operator for the field. + Check the Op enum for the list of possible + values. + + This field is a member of `oneof`_ ``_op``. + val (str): + The value of the field. + + This field is a member of `oneof`_ ``_val``. + """ + class Op(proto.Enum): + r"""The match operator for the field. + + Values: + UNDEFINED_OP (0): + A value indicating that the enum field is not + set. + CONTAINS (215180831): + The operator matches if the field value + contains the specified value. 
+ ENDS_WITH (490402221): + The operator matches if the field value ends + with the specified value. + EQUALS (442201023): + The operator matches if the field value + equals the specified value. + EQUALS_ANY (337226060): + The operator matches if the field value is + any value. + STARTS_WITH (139505652): + The operator matches if the field value + starts with the specified value. + """ + UNDEFINED_OP = 0 + CONTAINS = 215180831 + ENDS_WITH = 490402221 + EQUALS = 442201023 + EQUALS_ANY = 337226060 + STARTS_WITH = 139505652 + + op: str = proto.Field( + proto.STRING, + number=3553, + optional=True, + ) + val: str = proto.Field( + proto.STRING, + number=116513, + optional=True, + ) + + +class SecurityPolicyRuleRateLimitOptions(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ban_duration_sec (int): + Can only be specified if the action for the rule is + "rate_based_ban". If specified, determines the time (in + seconds) the traffic will continue to be banned by the rate + limit after the rate falls below the threshold. + + This field is a member of `oneof`_ ``_ban_duration_sec``. + ban_threshold (google.cloud.compute_v1.types.SecurityPolicyRuleRateLimitOptionsThreshold): + Can only be specified if the action for the rule is + "rate_based_ban". If specified, the key will be banned for + the configured 'ban_duration_sec' when the number of + requests that exceed the 'rate_limit_threshold' also exceed + this 'ban_threshold'. + + This field is a member of `oneof`_ ``_ban_threshold``. + conform_action (str): + Action to take for requests that are under + the configured rate limit threshold. Valid + option is "allow" only. + + This field is a member of `oneof`_ ``_conform_action``. + enforce_on_key (str): + Determines the key to enforce the rate_limit_threshold on. + Possible values are: - ALL: A single rate limit threshold is + applied to all the requests matching this rule. 
This is the + default value if "enforceOnKey" is not configured. - IP: The + source IP address of the request is the key. Each IP has + this limit enforced separately. - HTTP_HEADER: The value of + the HTTP header whose name is configured under + "enforceOnKeyName". The key value is truncated to the first + 128 bytes of the header value. If no such header is present + in the request, the key type defaults to ALL. - XFF_IP: The + first IP address (i.e. the originating client IP address) + specified in the list of IPs under X-Forwarded-For HTTP + header. If no such header is present or the value is not a + valid IP, the key defaults to the source IP address of the + request i.e. key type IP. - HTTP_COOKIE: The value of the + HTTP cookie whose name is configured under + "enforceOnKeyName". The key value is truncated to the first + 128 bytes of the cookie value. If no such cookie is present + in the request, the key type defaults to ALL. - HTTP_PATH: + The URL path of the HTTP request. The key value is truncated + to the first 128 bytes. - SNI: Server name indication in the + TLS session of the HTTPS request. The key value is truncated + to the first 128 bytes. The key type defaults to ALL on a + HTTP session. - REGION_CODE: The country/region from which + the request originates. Check the EnforceOnKey enum for the + list of possible values. + + This field is a member of `oneof`_ ``_enforce_on_key``. + enforce_on_key_configs (MutableSequence[google.cloud.compute_v1.types.SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig]): + If specified, any combination of values of + enforce_on_key_type/enforce_on_key_name is treated as the + key on which ratelimit threshold/action is enforced. You can + specify up to 3 enforce_on_key_configs. If + enforce_on_key_configs is specified, enforce_on_key must not + be specified. 
+ enforce_on_key_name (str): + Rate limit key name applicable only for the following key + types: HTTP_HEADER -- Name of the HTTP header whose value is + taken as the key value. HTTP_COOKIE -- Name of the HTTP + cookie whose value is taken as the key value. + + This field is a member of `oneof`_ ``_enforce_on_key_name``. + exceed_action (str): + Action to take for requests that are above the configured + rate limit threshold, to either deny with a specified HTTP + response code, or redirect to a different endpoint. Valid + options are ``deny(STATUS)``, where valid values for + ``STATUS`` are 403, 404, 429, and 502, and ``redirect``, + where the redirect parameters come from + ``exceedRedirectOptions`` below. The ``redirect`` action is + only supported in Global Security Policies of type + CLOUD_ARMOR. + + This field is a member of `oneof`_ ``_exceed_action``. + exceed_redirect_options (google.cloud.compute_v1.types.SecurityPolicyRuleRedirectOptions): + Parameters defining the redirect action that is used as the + exceed action. Cannot be specified if the exceed action is + not redirect. This field is only supported in Global + Security Policies of type CLOUD_ARMOR. + + This field is a member of `oneof`_ ``_exceed_redirect_options``. + rate_limit_threshold (google.cloud.compute_v1.types.SecurityPolicyRuleRateLimitOptionsThreshold): + Threshold at which to begin ratelimiting. + + This field is a member of `oneof`_ ``_rate_limit_threshold``. + """ + class EnforceOnKey(proto.Enum): + r"""Determines the key to enforce the rate_limit_threshold on. Possible + values are: - ALL: A single rate limit threshold is applied to all + the requests matching this rule. This is the default value if + "enforceOnKey" is not configured. - IP: The source IP address of the + request is the key. Each IP has this limit enforced separately. - + HTTP_HEADER: The value of the HTTP header whose name is configured + under "enforceOnKeyName". 
The key value is truncated to the first + 128 bytes of the header value. If no such header is present in the + request, the key type defaults to ALL. - XFF_IP: The first IP + address (i.e. the originating client IP address) specified in the + list of IPs under X-Forwarded-For HTTP header. If no such header is + present or the value is not a valid IP, the key defaults to the + source IP address of the request i.e. key type IP. - HTTP_COOKIE: + The value of the HTTP cookie whose name is configured under + "enforceOnKeyName". The key value is truncated to the first 128 + bytes of the cookie value. If no such cookie is present in the + request, the key type defaults to ALL. - HTTP_PATH: The URL path of + the HTTP request. The key value is truncated to the first 128 bytes. + - SNI: Server name indication in the TLS session of the HTTPS + request. The key value is truncated to the first 128 bytes. The key + type defaults to ALL on a HTTP session. - REGION_CODE: The + country/region from which the request originates. + + Values: + UNDEFINED_ENFORCE_ON_KEY (0): + A value indicating that the enum field is not + set. + ALL (64897): + No description available. + HTTP_COOKIE (494981627): + No description available. + HTTP_HEADER (91597348): + No description available. + HTTP_PATH (311503228): + No description available. + IP (2343): + No description available. + REGION_CODE (79559768): + No description available. + SNI (82254): + No description available. + XFF_IP (438707118): + No description available. 
+ """ + UNDEFINED_ENFORCE_ON_KEY = 0 + ALL = 64897 + HTTP_COOKIE = 494981627 + HTTP_HEADER = 91597348 + HTTP_PATH = 311503228 + IP = 2343 + REGION_CODE = 79559768 + SNI = 82254 + XFF_IP = 438707118 + + ban_duration_sec: int = proto.Field( + proto.INT32, + number=42896726, + optional=True, + ) + ban_threshold: 'SecurityPolicyRuleRateLimitOptionsThreshold' = proto.Field( + proto.MESSAGE, + number=501208123, + optional=True, + message='SecurityPolicyRuleRateLimitOptionsThreshold', + ) + conform_action: str = proto.Field( + proto.STRING, + number=517612367, + optional=True, + ) + enforce_on_key: str = proto.Field( + proto.STRING, + number=416648956, + optional=True, + ) + enforce_on_key_configs: MutableSequence['SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=33906478, + message='SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig', + ) + enforce_on_key_name: str = proto.Field( + proto.STRING, + number=132555246, + optional=True, + ) + exceed_action: str = proto.Field( + proto.STRING, + number=167159073, + optional=True, + ) + exceed_redirect_options: 'SecurityPolicyRuleRedirectOptions' = proto.Field( + proto.MESSAGE, + number=473646694, + optional=True, + message='SecurityPolicyRuleRedirectOptions', + ) + rate_limit_threshold: 'SecurityPolicyRuleRateLimitOptionsThreshold' = proto.Field( + proto.MESSAGE, + number=315875208, + optional=True, + message='SecurityPolicyRuleRateLimitOptionsThreshold', + ) + + +class SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enforce_on_key_name (str): + Rate limit key name applicable only for the following key + types: HTTP_HEADER -- Name of the HTTP header whose value is + taken as the key value. HTTP_COOKIE -- Name of the HTTP + cookie whose value is taken as the key value. 
+ + This field is a member of `oneof`_ ``_enforce_on_key_name``. + enforce_on_key_type (str): + Determines the key to enforce the rate_limit_threshold on. + Possible values are: - ALL: A single rate limit threshold is + applied to all the requests matching this rule. This is the + default value if "enforceOnKeyConfigs" is not configured. - + IP: The source IP address of the request is the key. Each IP + has this limit enforced separately. - HTTP_HEADER: The value + of the HTTP header whose name is configured under + "enforceOnKeyName". The key value is truncated to the first + 128 bytes of the header value. If no such header is present + in the request, the key type defaults to ALL. - XFF_IP: The + first IP address (i.e. the originating client IP address) + specified in the list of IPs under X-Forwarded-For HTTP + header. If no such header is present or the value is not a + valid IP, the key defaults to the source IP address of the + request i.e. key type IP. - HTTP_COOKIE: The value of the + HTTP cookie whose name is configured under + "enforceOnKeyName". The key value is truncated to the first + 128 bytes of the cookie value. If no such cookie is present + in the request, the key type defaults to ALL. - HTTP_PATH: + The URL path of the HTTP request. The key value is truncated + to the first 128 bytes. - SNI: Server name indication in the + TLS session of the HTTPS request. The key value is truncated + to the first 128 bytes. The key type defaults to ALL on a + HTTP session. - REGION_CODE: The country/region from which + the request originates. Check the EnforceOnKeyType enum for + the list of possible values. + + This field is a member of `oneof`_ ``_enforce_on_key_type``. + """ + class EnforceOnKeyType(proto.Enum): + r"""Determines the key to enforce the rate_limit_threshold on. Possible + values are: - ALL: A single rate limit threshold is applied to all + the requests matching this rule. This is the default value if + "enforceOnKeyConfigs" is not configured. 
- IP: The source IP address + of the request is the key. Each IP has this limit enforced + separately. - HTTP_HEADER: The value of the HTTP header whose name + is configured under "enforceOnKeyName". The key value is truncated + to the first 128 bytes of the header value. If no such header is + present in the request, the key type defaults to ALL. - XFF_IP: The + first IP address (i.e. the originating client IP address) specified + in the list of IPs under X-Forwarded-For HTTP header. If no such + header is present or the value is not a valid IP, the key defaults + to the source IP address of the request i.e. key type IP. - + HTTP_COOKIE: The value of the HTTP cookie whose name is configured + under "enforceOnKeyName". The key value is truncated to the first + 128 bytes of the cookie value. If no such cookie is present in the + request, the key type defaults to ALL. - HTTP_PATH: The URL path of + the HTTP request. The key value is truncated to the first 128 bytes. + - SNI: Server name indication in the TLS session of the HTTPS + request. The key value is truncated to the first 128 bytes. The key + type defaults to ALL on a HTTP session. - REGION_CODE: The + country/region from which the request originates. + + Values: + UNDEFINED_ENFORCE_ON_KEY_TYPE (0): + A value indicating that the enum field is not + set. + ALL (64897): + No description available. + HTTP_COOKIE (494981627): + No description available. + HTTP_HEADER (91597348): + No description available. + HTTP_PATH (311503228): + No description available. + IP (2343): + No description available. + REGION_CODE (79559768): + No description available. + SNI (82254): + No description available. + XFF_IP (438707118): + No description available. 
+ """ + UNDEFINED_ENFORCE_ON_KEY_TYPE = 0 + ALL = 64897 + HTTP_COOKIE = 494981627 + HTTP_HEADER = 91597348 + HTTP_PATH = 311503228 + IP = 2343 + REGION_CODE = 79559768 + SNI = 82254 + XFF_IP = 438707118 + + enforce_on_key_name: str = proto.Field( + proto.STRING, + number=132555246, + optional=True, + ) + enforce_on_key_type: str = proto.Field( + proto.STRING, + number=132757149, + optional=True, + ) + + +class SecurityPolicyRuleRateLimitOptionsThreshold(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + count (int): + Number of HTTP(S) requests for calculating + the threshold. + + This field is a member of `oneof`_ ``_count``. + interval_sec (int): + Interval over which the threshold is + computed. + + This field is a member of `oneof`_ ``_interval_sec``. + """ + + count: int = proto.Field( + proto.INT32, + number=94851343, + optional=True, + ) + interval_sec: int = proto.Field( + proto.INT32, + number=41084375, + optional=True, + ) + + +class SecurityPolicyRuleRedirectOptions(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target (str): + Target for the redirect action. This is required if the type + is EXTERNAL_302 and cannot be specified for + GOOGLE_RECAPTCHA. + + This field is a member of `oneof`_ ``_target``. + type_ (str): + Type of the redirect action. + Check the Type enum for the list of possible + values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""Type of the redirect action. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + EXTERNAL_302 (395733761): + No description available. + GOOGLE_RECAPTCHA (518803009): + No description available. 
+ """ + UNDEFINED_TYPE = 0 + EXTERNAL_302 = 395733761 + GOOGLE_RECAPTCHA = 518803009 + + target: str = proto.Field( + proto.STRING, + number=192835985, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class SecuritySettings(proto.Message): + r"""The authentication and authorization settings for a + BackendService. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + client_tls_policy (str): + Optional. A URL referring to a + networksecurity.ClientTlsPolicy resource that describes how + clients should authenticate with this service's backends. + clientTlsPolicy only applies to a global BackendService with + the loadBalancingScheme set to INTERNAL_SELF_MANAGED. If + left blank, communications are not encrypted. + + This field is a member of `oneof`_ ``_client_tls_policy``. + subject_alt_names (MutableSequence[str]): + Optional. A list of Subject Alternative Names (SANs) that + the client verifies during a mutual TLS handshake with an + server/endpoint for this BackendService. When the server + presents its X.509 certificate to the client, the client + inspects the certificate's subjectAltName field. If the + field contains one of the specified values, the + communication continues. Otherwise, it fails. This + additional check enables the client to verify that the + server is authorized to run the requested service. Note that + the contents of the server certificate's subjectAltName + field are configured by the Public Key Infrastructure which + provisions server identities. Only applies to a global + BackendService with loadBalancingScheme set to + INTERNAL_SELF_MANAGED. Only applies when BackendService has + an attached clientTlsPolicy with clientCertificate (mTLS + mode). 
+ """ + + client_tls_policy: str = proto.Field( + proto.STRING, + number=462325226, + optional=True, + ) + subject_alt_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=330029535, + ) + + +class SendDiagnosticInterruptInstanceRequest(proto.Message): + r"""A request message for Instances.SendDiagnosticInterrupt. See + the method description for details. + + Attributes: + instance (str): + Name of the instance scoping this request. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SendDiagnosticInterruptInstanceResponse(proto.Message): + r"""A response message for Instances.SendDiagnosticInterrupt. See + the method description for details. + + """ + + +class SerialPortOutput(proto.Message): + r"""An instance serial console output. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + contents (str): + [Output Only] The contents of the console output. + + This field is a member of `oneof`_ ``_contents``. + kind (str): + [Output Only] Type of the resource. Always + compute#serialPortOutput for serial port output. + + This field is a member of `oneof`_ ``_kind``. + next_ (int): + [Output Only] The position of the next byte of content, + regardless of whether the content exists, following the + output returned in the ``contents`` property. Use this value + in the next request as the start parameter. + + This field is a member of `oneof`_ ``_next``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + start (int): + The starting byte position of the output that was returned. 
+ This should match the start parameter sent with the request. + If the serial console output exceeds the size of the buffer + (1 MB), older output is overwritten by newer content. The + output start value will indicate the byte position of the + output that was returned, which might be different than the + ``start`` value that was specified in the request. + + This field is a member of `oneof`_ ``_start``. + """ + + contents: str = proto.Field( + proto.STRING, + number=506419994, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_: int = proto.Field( + proto.INT64, + number=3377907, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + start: int = proto.Field( + proto.INT64, + number=109757538, + optional=True, + ) + + +class ServerBinding(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (str): + Check the Type enum for the list of possible + values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r""" + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + RESTART_NODE_ON_ANY_SERVER (502950985): + Node may associate with any physical server + over its lifetime. + RESTART_NODE_ON_MINIMAL_SERVERS (204166495): + Node may associate with minimal physical + servers over its lifetime. + SERVER_BINDING_TYPE_UNSPECIFIED (180825512): + No description available. + """ + UNDEFINED_TYPE = 0 + RESTART_NODE_ON_ANY_SERVER = 502950985 + RESTART_NODE_ON_MINIMAL_SERVERS = 204166495 + SERVER_BINDING_TYPE_UNSPECIFIED = 180825512 + + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class ServiceAccount(proto.Message): + r"""A service account. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + email (str): + Email address of the service account. + + This field is a member of `oneof`_ ``_email``. + scopes (MutableSequence[str]): + The list of scopes to be made available for + this service account. + """ + + email: str = proto.Field( + proto.STRING, + number=96619420, + optional=True, + ) + scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=165973151, + ) + + +class ServiceAttachment(proto.Message): + r"""Represents a ServiceAttachment resource. A service attachment + represents a service that a producer has exposed. It + encapsulates the load balancer which fronts the service runs and + a list of NAT IP ranges that the producers uses to represent the + consumers connecting to the service. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + connected_endpoints (MutableSequence[google.cloud.compute_v1.types.ServiceAttachmentConnectedEndpoint]): + [Output Only] An array of connections for all the consumers + connected to this service attachment. + connection_preference (str): + The connection preference of service attachment. The value + can be set to ACCEPT_AUTOMATIC. An ACCEPT_AUTOMATIC service + attachment is one that always accepts the connection from + consumer forwarding rules. Check the ConnectionPreference + enum for the list of possible values. + + This field is a member of `oneof`_ ``_connection_preference``. + consumer_accept_lists (MutableSequence[google.cloud.compute_v1.types.ServiceAttachmentConsumerProjectLimit]): + Projects that are allowed to connect to this + service attachment. + consumer_reject_lists (MutableSequence[str]): + Projects that are not allowed to connect to + this service attachment. The project can be + specified using its id or number. 
+ creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + domain_names (MutableSequence[str]): + If specified, the domain name will be used + during the integration between the PSC connected + endpoints and the Cloud DNS. For example, this + is a valid domain name: "p.mycompany.com.". + Current max number of domain names supported is + 1. + enable_proxy_protocol (bool): + If true, enable the proxy protocol which is + for supplying client TCP/IP address data in TCP + connections that traverse proxies on their way + to destination servers. + + This field is a member of `oneof`_ ``_enable_proxy_protocol``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a ServiceAttachment. An + up-to-date fingerprint must be provided in order + to patch/update the ServiceAttachment; + otherwise, the request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve the + ServiceAttachment. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource type. + The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#serviceAttachment for service attachments. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + nat_subnets (MutableSequence[str]): + An array of URLs where each entry is the URL + of a subnet provided by the service producer to + use for NAT in this service attachment. + producer_forwarding_rule (str): + The URL of a forwarding rule with loadBalancingScheme + INTERNAL\* that is serving the endpoint identified by this + service attachment. + + This field is a member of `oneof`_ ``_producer_forwarding_rule``. + psc_service_attachment_id (google.cloud.compute_v1.types.Uint128): + [Output Only] An 128-bit global unique ID of the PSC service + attachment. + + This field is a member of `oneof`_ ``_psc_service_attachment_id``. + reconcile_connections (bool): + This flag determines whether a consumer + accept/reject list change can reconcile the + statuses of existing ACCEPTED or REJECTED PSC + endpoints. - If false, connection policy update + will only affect existing PENDING PSC endpoints. + Existing ACCEPTED/REJECTED endpoints will remain + untouched regardless how the connection policy + is modified . - If true, update will affect both + PENDING and ACCEPTED/REJECTED PSC endpoints. For + example, an ACCEPTED PSC endpoint will be moved + to REJECTED if its project is added to the + reject list. For newly created service + attachment, this boolean defaults to true. + + This field is a member of `oneof`_ ``_reconcile_connections``. + region (str): + [Output Only] URL of the region where the service attachment + resides. This field applies only to the region resource. You + must specify this field as part of the HTTP request URL. It + is not settable as a field in the request body. 
+ + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + target_service (str): + The URL of a service serving the endpoint + identified by this service attachment. + + This field is a member of `oneof`_ ``_target_service``. + """ + class ConnectionPreference(proto.Enum): + r"""The connection preference of service attachment. The value can be + set to ACCEPT_AUTOMATIC. An ACCEPT_AUTOMATIC service attachment is + one that always accepts the connection from consumer forwarding + rules. + + Values: + UNDEFINED_CONNECTION_PREFERENCE (0): + A value indicating that the enum field is not + set. + ACCEPT_AUTOMATIC (75250580): + No description available. + ACCEPT_MANUAL (373061341): + No description available. + CONNECTION_PREFERENCE_UNSPECIFIED (34590772): + No description available. + """ + UNDEFINED_CONNECTION_PREFERENCE = 0 + ACCEPT_AUTOMATIC = 75250580 + ACCEPT_MANUAL = 373061341 + CONNECTION_PREFERENCE_UNSPECIFIED = 34590772 + + connected_endpoints: MutableSequence['ServiceAttachmentConnectedEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=72223688, + message='ServiceAttachmentConnectedEndpoint', + ) + connection_preference: str = proto.Field( + proto.STRING, + number=285818076, + optional=True, + ) + consumer_accept_lists: MutableSequence['ServiceAttachmentConsumerProjectLimit'] = proto.RepeatedField( + proto.MESSAGE, + number=402725703, + message='ServiceAttachmentConsumerProjectLimit', + ) + consumer_reject_lists: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=204033182, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + domain_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6450189, + ) + enable_proxy_protocol: bool = proto.Field( + 
proto.BOOL, + number=363791237, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + nat_subnets: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=374785944, + ) + producer_forwarding_rule: str = proto.Field( + proto.STRING, + number=247927889, + optional=True, + ) + psc_service_attachment_id: 'Uint128' = proto.Field( + proto.MESSAGE, + number=527695214, + optional=True, + message='Uint128', + ) + reconcile_connections: bool = proto.Field( + proto.BOOL, + number=125493732, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + target_service: str = proto.Field( + proto.STRING, + number=1293831, + optional=True, + ) + + +class ServiceAttachmentAggregatedList(proto.Message): + r"""Contains a list of ServiceAttachmentsScopedList. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.ServiceAttachmentsScopedList]): + A list of ServiceAttachmentsScopedList + resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. 
+ Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'ServiceAttachmentsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='ServiceAttachmentsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ServiceAttachmentConnectedEndpoint(proto.Message): + r"""[Output Only] A connection connected to this service attachment. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consumer_network (str): + The url of the consumer network. + + This field is a member of `oneof`_ ``_consumer_network``. + endpoint (str): + The url of a connected endpoint. + + This field is a member of `oneof`_ ``_endpoint``. + psc_connection_id (int): + The PSC connection id of the connected + endpoint. + + This field is a member of `oneof`_ ``_psc_connection_id``. 
+ status (str): + The status of a connected endpoint to this + service attachment. Check the Status enum for + the list of possible values. + + This field is a member of `oneof`_ ``_status``. + """ + class Status(proto.Enum): + r"""The status of a connected endpoint to this service + attachment. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + ACCEPTED (246714279): + The connection has been accepted by the + producer. + CLOSED (380163436): + The connection has been closed by the + producer. + NEEDS_ATTENTION (344491452): + The connection has been accepted by the + producer, but the producer needs to take further + action before the forwarding rule can serve + traffic. + PENDING (35394935): + The connection is pending acceptance by the + producer. + REJECTED (174130302): + The consumer is still connected but not using + the connection. + STATUS_UNSPECIFIED (42133066): + No description available. + """ + UNDEFINED_STATUS = 0 + ACCEPTED = 246714279 + CLOSED = 380163436 + NEEDS_ATTENTION = 344491452 + PENDING = 35394935 + REJECTED = 174130302 + STATUS_UNSPECIFIED = 42133066 + + consumer_network: str = proto.Field( + proto.STRING, + number=254357221, + optional=True, + ) + endpoint: str = proto.Field( + proto.STRING, + number=130489749, + optional=True, + ) + psc_connection_id: int = proto.Field( + proto.UINT64, + number=292082397, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class ServiceAttachmentConsumerProjectLimit(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + connection_limit (int): + The value of the limit to set. + + This field is a member of `oneof`_ ``_connection_limit``. + network_url (str): + The network URL for the network to set the + limit for. + + This field is a member of `oneof`_ ``_network_url``. 
+ project_id_or_num (str): + The project id or number for the project to + set the limit for. + + This field is a member of `oneof`_ ``_project_id_or_num``. + """ + + connection_limit: int = proto.Field( + proto.UINT32, + number=131403546, + optional=True, + ) + network_url: str = proto.Field( + proto.STRING, + number=207194078, + optional=True, + ) + project_id_or_num: str = proto.Field( + proto.STRING, + number=349783336, + optional=True, + ) + + +class ServiceAttachmentList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.ServiceAttachment]): + A list of ServiceAttachment resources. + kind (str): + [Output Only] Type of the resource. Always + compute#serviceAttachment for service attachments. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['ServiceAttachment'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='ServiceAttachment', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ServiceAttachmentsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + service_attachments (MutableSequence[google.cloud.compute_v1.types.ServiceAttachment]): + A list of ServiceAttachments contained in + this scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of service attachments when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + service_attachments: MutableSequence['ServiceAttachment'] = proto.RepeatedField( + proto.MESSAGE, + number=307136806, + message='ServiceAttachment', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SetBackendServiceTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.SetBackendService. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_ssl_proxies_set_backend_service_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetBackendServiceRequest): + The body resource for this request + target_ssl_proxy (str): + Name of the TargetSslProxy resource whose + BackendService resource is to be set. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_ssl_proxies_set_backend_service_request_resource: 'TargetSslProxiesSetBackendServiceRequest' = proto.Field( + proto.MESSAGE, + number=139080868, + message='TargetSslProxiesSetBackendServiceRequest', + ) + target_ssl_proxy: str = proto.Field( + proto.STRING, + number=338795853, + ) + + +class SetBackendServiceTargetTcpProxyRequest(proto.Message): + r"""A request message for TargetTcpProxies.SetBackendService. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_tcp_proxies_set_backend_service_request_resource (google.cloud.compute_v1.types.TargetTcpProxiesSetBackendServiceRequest): + The body resource for this request + target_tcp_proxy (str): + Name of the TargetTcpProxy resource whose + BackendService resource is to be set. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_tcp_proxies_set_backend_service_request_resource: 'TargetTcpProxiesSetBackendServiceRequest' = proto.Field( + proto.MESSAGE, + number=273721583, + message='TargetTcpProxiesSetBackendServiceRequest', + ) + target_tcp_proxy: str = proto.Field( + proto.STRING, + number=503065442, + ) + + +class SetBackupTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.SetBackup. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + failover_ratio (float): + New failoverRatio value for the target pool. + + This field is a member of `oneof`_ ``_failover_ratio``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_pool (str): + Name of the TargetPool resource to set a + backup pool for. + target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + """ + + failover_ratio: float = proto.Field( + proto.FLOAT, + number=212667006, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + target_reference_resource: 'TargetReference' = proto.Field( + proto.MESSAGE, + number=523721712, + message='TargetReference', + ) + + +class SetCertificateMapTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.SetCertificateMap. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxies_set_certificate_map_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetCertificateMapRequest): + The body resource for this request + target_https_proxy (str): + Name of the TargetHttpsProxy resource whose + CertificateMap is to be set. The name must be + 1-63 characters long, and comply with RFC1035. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxies_set_certificate_map_request_resource: 'TargetHttpsProxiesSetCertificateMapRequest' = proto.Field( + proto.MESSAGE, + number=467639099, + message='TargetHttpsProxiesSetCertificateMapRequest', + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class SetCertificateMapTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.SetCertificateMap. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_ssl_proxies_set_certificate_map_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetCertificateMapRequest): + The body resource for this request + target_ssl_proxy (str): + Name of the TargetSslProxy resource whose + CertificateMap is to be set. The name must be + 1-63 characters long, and comply with RFC1035. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_ssl_proxies_set_certificate_map_request_resource: 'TargetSslProxiesSetCertificateMapRequest' = proto.Field( + proto.MESSAGE, + number=343984954, + message='TargetSslProxiesSetCertificateMapRequest', + ) + target_ssl_proxy: str = proto.Field( + proto.STRING, + number=338795853, + ) + + +class SetCommonInstanceMetadataProjectRequest(proto.Message): + r"""A request message for Projects.SetCommonInstanceMetadata. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + metadata_resource (google.cloud.compute_v1.types.Metadata): + The body resource for this request + project (str): + Project ID for this request. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + metadata_resource: 'Metadata' = proto.Field( + proto.MESSAGE, + number=291086110, + message='Metadata', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class SetDefaultNetworkTierProjectRequest(proto.Message): + r"""A request message for Projects.SetDefaultNetworkTier. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + projects_set_default_network_tier_request_resource (google.cloud.compute_v1.types.ProjectsSetDefaultNetworkTierRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + projects_set_default_network_tier_request_resource: 'ProjectsSetDefaultNetworkTierRequest' = proto.Field( + proto.MESSAGE, + number=126410762, + message='ProjectsSetDefaultNetworkTierRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class SetDeletionProtectionInstanceRequest(proto.Message): + r"""A request message for Instances.SetDeletionProtection. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + deletion_protection (bool): + Whether the resource should be protected + against deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + """ + + deletion_protection: bool = proto.Field( + proto.BOOL, + number=458014698, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetDiskAutoDeleteInstanceRequest(proto.Message): + r"""A request message for Instances.SetDiskAutoDelete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_delete (bool): + Whether to auto-delete the disk when the + instance is deleted. + device_name (str): + The device name of the disk to modify. Make a + get() request on the instance to view currently + attached disks and device names. + instance (str): + The instance name for this request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + auto_delete: bool = proto.Field( + proto.BOOL, + number=464761403, + ) + device_name: str = proto.Field( + proto.STRING, + number=67541716, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetEdgeSecurityPolicyBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.SetEdgeSecurityPolicy. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket (str): + Name of the BackendService resource to which + the security policy should be set. The name + should conform to RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy_reference_resource: 'SecurityPolicyReference' = proto.Field( + proto.MESSAGE, + number=204135024, + message='SecurityPolicyReference', + ) + + +class SetEdgeSecurityPolicyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.SetEdgeSecurityPolicy. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to which + the edge security policy should be set. The name + should conform to RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy_reference_resource: 'SecurityPolicyReference' = proto.Field( + proto.MESSAGE, + number=204135024, + message='SecurityPolicyReference', + ) + + +class SetIamPolicyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.SetIamPolicy. See the + method description for details. + + Attributes: + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_policy_request_resource: 'GlobalSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=337048498, + message='GlobalSetPolicyRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyDiskRequest(proto.Message): + r"""A request message for Disks.SetIamPolicy. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. 
+ zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + zone_set_policy_request_resource: 'ZoneSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=382082107, + message='ZoneSetPolicyRequest', + ) + + +class SetIamPolicyFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.SetIamPolicy. See the + method description for details. + + Attributes: + global_organization_set_policy_request_resource (google.cloud.compute_v1.types.GlobalOrganizationSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. + """ + + global_organization_set_policy_request_resource: 'GlobalOrganizationSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=177408606, + message='GlobalOrganizationSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyImageRequest(proto.Message): + r"""A request message for Images.SetIamPolicy. See the method + description for details. + + Attributes: + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_policy_request_resource: 'GlobalSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=337048498, + message='GlobalSetPolicyRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyInstanceRequest(proto.Message): + r"""A request message for Instances.SetIamPolicy. 
See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + zone_set_policy_request_resource: 'ZoneSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=382082107, + message='ZoneSetPolicyRequest', + ) + + +class SetIamPolicyInstanceTemplateRequest(proto.Message): + r"""A request message for InstanceTemplates.SetIamPolicy. See the + method description for details. + + Attributes: + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_policy_request_resource: 'GlobalSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=337048498, + message='GlobalSetPolicyRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyLicenseRequest(proto.Message): + r"""A request message for Licenses.SetIamPolicy. See the method + description for details. + + Attributes: + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + global_set_policy_request_resource: 'GlobalSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=337048498, + message='GlobalSetPolicyRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyMachineImageRequest(proto.Message): + r"""A request message for MachineImages.SetIamPolicy. See the + method description for details. + + Attributes: + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_policy_request_resource: 'GlobalSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=337048498, + message='GlobalSetPolicyRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.SetIamPolicy. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: 'RegionSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=276489091, + message='RegionSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.SetIamPolicy. + See the method description for details. + + Attributes: + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_policy_request_resource: 'GlobalSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=337048498, + message='GlobalSetPolicyRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.SetIamPolicy. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. 
+ zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + zone_set_policy_request_resource: 'ZoneSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=382082107, + message='ZoneSetPolicyRequest', + ) + + +class SetIamPolicyNodeTemplateRequest(proto.Message): + r"""A request message for NodeTemplates.SetIamPolicy. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: 'RegionSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=276489091, + message='RegionSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.SetIamPolicy. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: 'RegionSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=276489091, + message='RegionSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.SetIamPolicy. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: 'RegionSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=276489091, + message='RegionSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.SetIamPolicy. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: 'RegionSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=276489091, + message='RegionSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyReservationRequest(proto.Message): + r"""A request message for Reservations.SetIamPolicy. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + zone_set_policy_request_resource: 'ZoneSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=382082107, + message='ZoneSetPolicyRequest', + ) + + +class SetIamPolicyResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.SetIamPolicy. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: 'RegionSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=276489091, + message='RegionSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicyServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.SetIamPolicy. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: 'RegionSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=276489091, + message='RegionSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicySnapshotRequest(proto.Message): + r"""A request message for Snapshots.SetIamPolicy. See the method + description for details. + + Attributes: + global_set_policy_request_resource (google.cloud.compute_v1.types.GlobalSetPolicyRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + global_set_policy_request_resource: 'GlobalSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=337048498, + message='GlobalSetPolicyRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetIamPolicySubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.SetIamPolicy. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: 'RegionSetPolicyRequest' = proto.Field( + proto.MESSAGE, + number=276489091, + message='RegionSetPolicyRequest', + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetInstanceTemplateInstanceGroupManagerRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.SetInstanceTemplate. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + instance_group_managers_set_instance_template_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersSetInstanceTemplateRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_set_instance_template_request_resource: 'InstanceGroupManagersSetInstanceTemplateRequest' = proto.Field( + proto.MESSAGE, + number=9809093, + message='InstanceGroupManagersSetInstanceTemplateRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetInstanceTemplateRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.SetInstanceTemplate. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_group_managers_set_template_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTemplateRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_managers_set_template_request_resource: 'RegionInstanceGroupManagersSetTemplateRequest' = proto.Field( + proto.MESSAGE, + number=187310412, + message='RegionInstanceGroupManagersSetTemplateRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class SetLabelsAddressRequest(proto.Message): + r"""A request message for Addresses.SetLabels. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: 'RegionSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=259357782, + message='RegionSetLabelsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsDiskRequest(proto.Message): + r"""A request message for Disks.SetLabels. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + zone_set_labels_request_resource (google.cloud.compute_v1.types.ZoneSetLabelsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + zone_set_labels_request_resource: 'ZoneSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=364950798, + message='ZoneSetLabelsRequest', + ) + + +class SetLabelsExternalVpnGatewayRequest(proto.Message): + r"""A request message for ExternalVpnGateways.SetLabels. See the + method description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_labels_request_resource: 'GlobalSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=319917189, + message='GlobalSetLabelsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsForwardingRuleRequest(proto.Message): + r"""A request message for ForwardingRules.SetLabels. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. 
+ region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: 'RegionSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=259357782, + message='RegionSetLabelsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsGlobalAddressRequest(proto.Message): + r"""A request message for GlobalAddresses.SetLabels. See the + method description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + global_set_labels_request_resource: 'GlobalSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=319917189, + message='GlobalSetLabelsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsGlobalForwardingRuleRequest(proto.Message): + r"""A request message for GlobalForwardingRules.SetLabels. See + the method description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_labels_request_resource: 'GlobalSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=319917189, + message='GlobalSetLabelsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsImageRequest(proto.Message): + r"""A request message for Images.SetLabels. See the method + description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_labels_request_resource: 'GlobalSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=319917189, + message='GlobalSetLabelsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsInstanceRequest(proto.Message): + r"""A request message for Instances.SetLabels. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + instances_set_labels_request_resource (google.cloud.compute_v1.types.InstancesSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_set_labels_request_resource: 'InstancesSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=207749344, + message='InstancesSetLabelsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetLabelsInterconnectAttachmentRequest(proto.Message): + r"""A request message for InterconnectAttachments.SetLabels. See + the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: 'RegionSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=259357782, + message='RegionSetLabelsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsInterconnectRequest(proto.Message): + r"""A request message for Interconnects.SetLabels. See the method + description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. 
+ resource (str): + Name or id of the resource for this request. + """ + + global_set_labels_request_resource: 'GlobalSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=319917189, + message='GlobalSetLabelsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.SetLabels. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: 'RegionSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=259357782, + message='RegionSetLabelsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.SetLabels. See the + method description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_labels_request_resource: 'GlobalSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=319917189, + message='GlobalSetLabelsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsSnapshotRequest(proto.Message): + r"""A request message for Snapshots.SetLabels. See the method + description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + global_set_labels_request_resource: 'GlobalSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=319917189, + message='GlobalSetLabelsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsTargetVpnGatewayRequest(proto.Message): + r"""A request message for TargetVpnGateways.SetLabels. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: 'RegionSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=259357782, + message='RegionSetLabelsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsVpnGatewayRequest(proto.Message): + r"""A request message for VpnGateways.SetLabels. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: 'RegionSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=259357782, + message='RegionSetLabelsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsVpnTunnelRequest(proto.Message): + r"""A request message for VpnTunnels.SetLabels. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: 'RegionSetLabelsRequest' = proto.Field( + proto.MESSAGE, + number=259357782, + message='RegionSetLabelsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetMachineResourcesInstanceRequest(proto.Message): + r"""A request message for Instances.SetMachineResources. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + instances_set_machine_resources_request_resource (google.cloud.compute_v1.types.InstancesSetMachineResourcesRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_set_machine_resources_request_resource: 'InstancesSetMachineResourcesRequest' = proto.Field( + proto.MESSAGE, + number=196286318, + message='InstancesSetMachineResourcesRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetMachineTypeInstanceRequest(proto.Message): + r"""A request message for Instances.SetMachineType. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + instances_set_machine_type_request_resource (google.cloud.compute_v1.types.InstancesSetMachineTypeRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_set_machine_type_request_resource: 'InstancesSetMachineTypeRequest' = proto.Field( + proto.MESSAGE, + number=254157709, + message='InstancesSetMachineTypeRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetMetadataInstanceRequest(proto.Message): + r"""A request message for Instances.SetMetadata. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + metadata_resource (google.cloud.compute_v1.types.Metadata): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + metadata_resource: 'Metadata' = proto.Field( + proto.MESSAGE, + number=291086110, + message='Metadata', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetMinCpuPlatformInstanceRequest(proto.Message): + r"""A request message for Instances.SetMinCpuPlatform. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + instances_set_min_cpu_platform_request_resource (google.cloud.compute_v1.types.InstancesSetMinCpuPlatformRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_set_min_cpu_platform_request_resource: 'InstancesSetMinCpuPlatformRequest' = proto.Field( + proto.MESSAGE, + number=148459368, + message='InstancesSetMinCpuPlatformRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetNameInstanceRequest(proto.Message): + r"""A request message for Instances.SetName. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + The instance name for this request. + instances_set_name_request_resource (google.cloud.compute_v1.types.InstancesSetNameRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_set_name_request_resource: 'InstancesSetNameRequest' = proto.Field( + proto.MESSAGE, + number=272080980, + message='InstancesSetNameRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetNamedPortsInstanceGroupRequest(proto.Message): + r"""A request message for InstanceGroups.SetNamedPorts. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group (str): + The name of the instance group where the + named ports are updated. + instance_groups_set_named_ports_request_resource (google.cloud.compute_v1.types.InstanceGroupsSetNamedPortsRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the instance group + is located. 
+ """ + + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + instance_groups_set_named_ports_request_resource: 'InstanceGroupsSetNamedPortsRequest' = proto.Field( + proto.MESSAGE, + number=385151535, + message='InstanceGroupsSetNamedPortsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetNamedPortsRegionInstanceGroupRequest(proto.Message): + r"""A request message for RegionInstanceGroups.SetNamedPorts. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group (str): + The name of the regional instance group where + the named ports are updated. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_groups_set_named_ports_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsSetNamedPortsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_groups_set_named_ports_request_resource: 'RegionInstanceGroupsSetNamedPortsRequest' = proto.Field( + proto.MESSAGE, + number=1574938, + message='RegionInstanceGroupsSetNamedPortsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class SetNodeTemplateNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.SetNodeTemplate. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_group (str): + Name of the NodeGroup resource to update. + node_groups_set_node_template_request_resource (google.cloud.compute_v1.types.NodeGroupsSetNodeTemplateRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + node_groups_set_node_template_request_resource: 'NodeGroupsSetNodeTemplateRequest' = proto.Field( + proto.MESSAGE, + number=117382321, + message='NodeGroupsSetNodeTemplateRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetPrivateIpGoogleAccessSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.SetPrivateIpGoogleAccess. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + subnetwork (str): + Name of the Subnetwork resource. 
+ subnetworks_set_private_ip_google_access_request_resource (google.cloud.compute_v1.types.SubnetworksSetPrivateIpGoogleAccessRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + ) + subnetworks_set_private_ip_google_access_request_resource: 'SubnetworksSetPrivateIpGoogleAccessRequest' = proto.Field( + proto.MESSAGE, + number=268920696, + message='SubnetworksSetPrivateIpGoogleAccessRequest', + ) + + +class SetProxyHeaderTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.SetProxyHeader. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_ssl_proxies_set_proxy_header_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetProxyHeaderRequest): + The body resource for this request + target_ssl_proxy (str): + Name of the TargetSslProxy resource whose + ProxyHeader is to be set. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_ssl_proxies_set_proxy_header_request_resource: 'TargetSslProxiesSetProxyHeaderRequest' = proto.Field( + proto.MESSAGE, + number=205284526, + message='TargetSslProxiesSetProxyHeaderRequest', + ) + target_ssl_proxy: str = proto.Field( + proto.STRING, + number=338795853, + ) + + +class SetProxyHeaderTargetTcpProxyRequest(proto.Message): + r"""A request message for TargetTcpProxies.SetProxyHeader. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_tcp_proxies_set_proxy_header_request_resource (google.cloud.compute_v1.types.TargetTcpProxiesSetProxyHeaderRequest): + The body resource for this request + target_tcp_proxy (str): + Name of the TargetTcpProxy resource whose + ProxyHeader is to be set. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_tcp_proxies_set_proxy_header_request_resource: 'TargetTcpProxiesSetProxyHeaderRequest' = proto.Field( + proto.MESSAGE, + number=219958339, + message='TargetTcpProxiesSetProxyHeaderRequest', + ) + target_tcp_proxy: str = proto.Field( + proto.STRING, + number=503065442, + ) + + +class SetQuicOverrideTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.SetQuicOverride. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_https_proxies_set_quic_override_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetQuicOverrideRequest): + The body resource for this request + target_https_proxy (str): + Name of the TargetHttpsProxy resource to set + the QUIC override policy for. The name should + conform to RFC1035. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxies_set_quic_override_request_resource: 'TargetHttpsProxiesSetQuicOverrideRequest' = proto.Field( + proto.MESSAGE, + number=72940258, + message='TargetHttpsProxiesSetQuicOverrideRequest', + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class SetSchedulingInstanceRequest(proto.Message): + r"""A request message for Instances.SetScheduling. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Instance name for this request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ scheduling_resource (google.cloud.compute_v1.types.Scheduling): + The body resource for this request + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + scheduling_resource: 'Scheduling' = proto.Field( + proto.MESSAGE, + number=463181401, + message='Scheduling', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetSecurityPolicyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.SetSecurityPolicy. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to which + the security policy should be set. The name + should conform to RFC1035. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): + The body resource for this request + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + security_policy_reference_resource: 'SecurityPolicyReference' = proto.Field( + proto.MESSAGE, + number=204135024, + message='SecurityPolicyReference', + ) + + +class SetServiceAccountInstanceRequest(proto.Message): + r"""A request message for Instances.SetServiceAccount. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance resource to start. + instances_set_service_account_request_resource (google.cloud.compute_v1.types.InstancesSetServiceAccountRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_set_service_account_request_resource: 'InstancesSetServiceAccountRequest' = proto.Field( + proto.MESSAGE, + number=275550008, + message='InstancesSetServiceAccountRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetShieldedInstanceIntegrityPolicyInstanceRequest(proto.Message): + r"""A request message for + Instances.SetShieldedInstanceIntegrityPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name or id of the instance scoping this + request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + shielded_instance_integrity_policy_resource (google.cloud.compute_v1.types.ShieldedInstanceIntegrityPolicy): + The body resource for this request + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + shielded_instance_integrity_policy_resource: 'ShieldedInstanceIntegrityPolicy' = proto.Field( + proto.MESSAGE, + number=409169462, + message='ShieldedInstanceIntegrityPolicy', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetSslCertificatesRegionTargetHttpsProxyRequest(proto.Message): + r"""A request message for + RegionTargetHttpsProxies.SetSslCertificates. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_target_https_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.RegionTargetHttpsProxiesSetSslCertificatesRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to set + an SslCertificates resource for. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_target_https_proxies_set_ssl_certificates_request_resource: 'RegionTargetHttpsProxiesSetSslCertificatesRequest' = proto.Field( + proto.MESSAGE, + number=390693383, + message='RegionTargetHttpsProxiesSetSslCertificatesRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class SetSslCertificatesTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.SetSslCertificates. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetSslCertificatesRequest): + The body resource for this request + target_https_proxy (str): + Name of the TargetHttpsProxy resource to set + an SslCertificates resource for. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxies_set_ssl_certificates_request_resource: 'TargetHttpsProxiesSetSslCertificatesRequest' = proto.Field( + proto.MESSAGE, + number=223122908, + message='TargetHttpsProxiesSetSslCertificatesRequest', + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class SetSslCertificatesTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.SetSslCertificates. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_ssl_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetSslCertificatesRequest): + The body resource for this request + target_ssl_proxy (str): + Name of the TargetSslProxy resource whose + SslCertificate resource is to be set. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_ssl_proxies_set_ssl_certificates_request_resource: 'TargetSslProxiesSetSslCertificatesRequest' = proto.Field( + proto.MESSAGE, + number=147940797, + message='TargetSslProxiesSetSslCertificatesRequest', + ) + target_ssl_proxy: str = proto.Field( + proto.STRING, + number=338795853, + ) + + +class SetSslPolicyTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.SetSslPolicy. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_policy_reference_resource (google.cloud.compute_v1.types.SslPolicyReference): + The body resource for this request + target_https_proxy (str): + Name of the TargetHttpsProxy resource whose + SSL policy is to be set. The name must be 1-63 + characters long, and comply with RFC1035. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy_reference_resource: 'SslPolicyReference' = proto.Field( + proto.MESSAGE, + number=235403836, + message='SslPolicyReference', + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class SetSslPolicyTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.SetSslPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_policy_reference_resource (google.cloud.compute_v1.types.SslPolicyReference): + The body resource for this request + target_ssl_proxy (str): + Name of the TargetSslProxy resource whose SSL + policy is to be set. The name must be 1-63 + characters long, and comply with RFC1035. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy_reference_resource: 'SslPolicyReference' = proto.Field( + proto.MESSAGE, + number=235403836, + message='SslPolicyReference', + ) + target_ssl_proxy: str = proto.Field( + proto.STRING, + number=338795853, + ) + + +class SetTagsInstanceRequest(proto.Message): + r"""A request message for Instances.SetTags. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + tags_resource (google.cloud.compute_v1.types.Tags): + The body resource for this request + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + tags_resource: 'Tags' = proto.Field( + proto.MESSAGE, + number=331435380, + message='Tags', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetTargetForwardingRuleRequest(proto.Message): + r"""A request message for ForwardingRules.SetTarget. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule (str): + Name of the ForwardingRule resource in which + target is to be set. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_reference_resource: 'TargetReference' = proto.Field( + proto.MESSAGE, + number=523721712, + message='TargetReference', + ) + + +class SetTargetGlobalForwardingRuleRequest(proto.Message): + r"""A request message for GlobalForwardingRules.SetTarget. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rule (str): + Name of the ForwardingRule resource in which + target is to be set. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_reference_resource (google.cloud.compute_v1.types.TargetReference): + The body resource for this request + """ + + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_reference_resource: 'TargetReference' = proto.Field( + proto.MESSAGE, + number=523721712, + message='TargetReference', + ) + + +class SetTargetPoolsInstanceGroupManagerRequest(proto.Message): + r"""A request message for InstanceGroupManagers.SetTargetPools. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + instance_group_managers_set_target_pools_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersSetTargetPoolsRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_set_target_pools_request_resource: 'InstanceGroupManagersSetTargetPoolsRequest' = proto.Field( + proto.MESSAGE, + number=281150216, + message='InstanceGroupManagersSetTargetPoolsRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetTargetPoolsRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.SetTargetPools. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + Name of the managed instance group. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_group_managers_set_target_pools_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTargetPoolsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_managers_set_target_pools_request_resource: 'RegionInstanceGroupManagersSetTargetPoolsRequest' = proto.Field( + proto.MESSAGE, + number=78734717, + message='RegionInstanceGroupManagersSetTargetPoolsRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class SetUrlMapRegionTargetHttpProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpProxies.SetUrlMap. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_http_proxy (str): + Name of the TargetHttpProxy to set a URL map + for. 
+ url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_http_proxy: str = proto.Field( + proto.STRING, + number=206872421, + ) + url_map_reference_resource: 'UrlMapReference' = proto.Field( + proto.MESSAGE, + number=398701333, + message='UrlMapReference', + ) + + +class SetUrlMapRegionTargetHttpsProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpsProxies.SetUrlMap. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxy (str): + Name of the TargetHttpsProxy to set a URL map + for. 
+ url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + url_map_reference_resource: 'UrlMapReference' = proto.Field( + proto.MESSAGE, + number=398701333, + message='UrlMapReference', + ) + + +class SetUrlMapTargetHttpProxyRequest(proto.Message): + r"""A request message for TargetHttpProxies.SetUrlMap. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_http_proxy (str): + Name of the TargetHttpProxy to set a URL map + for. 
+ url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_http_proxy: str = proto.Field( + proto.STRING, + number=206872421, + ) + url_map_reference_resource: 'UrlMapReference' = proto.Field( + proto.MESSAGE, + number=398701333, + message='UrlMapReference', + ) + + +class SetUrlMapTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.SetUrlMap. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxy (str): + Name of the TargetHttpsProxy resource whose + URL map is to be set. 
+ url_map_reference_resource (google.cloud.compute_v1.types.UrlMapReference): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + url_map_reference_resource: 'UrlMapReference' = proto.Field( + proto.MESSAGE, + number=398701333, + message='UrlMapReference', + ) + + +class SetUsageExportBucketProjectRequest(proto.Message): + r"""A request message for Projects.SetUsageExportBucket. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ usage_export_location_resource (google.cloud.compute_v1.types.UsageExportLocation): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + usage_export_location_resource: 'UsageExportLocation' = proto.Field( + proto.MESSAGE, + number=20260459, + message='UsageExportLocation', + ) + + +class ShareSettings(proto.Message): + r"""The share setting for reservations and sole tenancy node + groups. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project_map (MutableMapping[str, google.cloud.compute_v1.types.ShareSettingsProjectConfig]): + A map of project id and project config. This is only valid + when share_type's value is SPECIFIC_PROJECTS. + share_type (str): + Type of sharing for this shared-reservation + Check the ShareType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_share_type``. + """ + class ShareType(proto.Enum): + r"""Type of sharing for this shared-reservation + + Values: + UNDEFINED_SHARE_TYPE (0): + A value indicating that the enum field is not + set. + LOCAL (72607563): + Default value. + ORGANIZATION (274978099): + Shared-reservation is open to entire + Organization + SHARE_TYPE_UNSPECIFIED (494771730): + Default value. This value is unused. 
+ SPECIFIC_PROJECTS (347838695): + Shared-reservation is open to specific + projects + """ + UNDEFINED_SHARE_TYPE = 0 + LOCAL = 72607563 + ORGANIZATION = 274978099 + SHARE_TYPE_UNSPECIFIED = 494771730 + SPECIFIC_PROJECTS = 347838695 + + project_map: MutableMapping[str, 'ShareSettingsProjectConfig'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=134212406, + message='ShareSettingsProjectConfig', + ) + share_type: str = proto.Field( + proto.STRING, + number=359533466, + optional=True, + ) + + +class ShareSettingsProjectConfig(proto.Message): + r"""Config for each project in the share settings. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project_id (str): + The project ID, should be same as the key of + this project config in the parent map. + + This field is a member of `oneof`_ ``_project_id``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=177513473, + optional=True, + ) + + +class ShieldedInstanceConfig(proto.Message): + r"""A set of Shielded Instance options. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_integrity_monitoring (bool): + Defines whether the instance has integrity + monitoring enabled. Enabled by default. + + This field is a member of `oneof`_ ``_enable_integrity_monitoring``. + enable_secure_boot (bool): + Defines whether the instance has Secure Boot + enabled. Disabled by default. + + This field is a member of `oneof`_ ``_enable_secure_boot``. + enable_vtpm (bool): + Defines whether the instance has the vTPM + enabled. Enabled by default. + + This field is a member of `oneof`_ ``_enable_vtpm``. 
+ """ + + enable_integrity_monitoring: bool = proto.Field( + proto.BOOL, + number=409071030, + optional=True, + ) + enable_secure_boot: bool = proto.Field( + proto.BOOL, + number=123568638, + optional=True, + ) + enable_vtpm: bool = proto.Field( + proto.BOOL, + number=181858935, + optional=True, + ) + + +class ShieldedInstanceIdentity(proto.Message): + r"""A Shielded Instance Identity. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + encryption_key (google.cloud.compute_v1.types.ShieldedInstanceIdentityEntry): + An Endorsement Key (EK) made by the RSA 2048 + algorithm issued to the Shielded Instance's + vTPM. + + This field is a member of `oneof`_ ``_encryption_key``. + kind (str): + [Output Only] Type of the resource. Always + compute#shieldedInstanceIdentity for shielded Instance + identity entry. + + This field is a member of `oneof`_ ``_kind``. + signing_key (google.cloud.compute_v1.types.ShieldedInstanceIdentityEntry): + An Attestation Key (AK) made by the RSA 2048 + algorithm issued to the Shielded Instance's + vTPM. + + This field is a member of `oneof`_ ``_signing_key``. + """ + + encryption_key: 'ShieldedInstanceIdentityEntry' = proto.Field( + proto.MESSAGE, + number=488268707, + optional=True, + message='ShieldedInstanceIdentityEntry', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + signing_key: 'ShieldedInstanceIdentityEntry' = proto.Field( + proto.MESSAGE, + number=320948261, + optional=True, + message='ShieldedInstanceIdentityEntry', + ) + + +class ShieldedInstanceIdentityEntry(proto.Message): + r"""A Shielded Instance Identity Entry. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ek_cert (str): + A PEM-encoded X.509 certificate. This field + can be empty. + + This field is a member of `oneof`_ ``_ek_cert``. + ek_pub (str): + A PEM-encoded public key. 

            This field is a member of `oneof`_ ``_ek_pub``.
    """

    # PEM-encoded certificate; the docstring above notes it may be empty.
    ek_cert: str = proto.Field(
        proto.STRING,
        number=450178589,
        optional=True,
    )
    ek_pub: str = proto.Field(
        proto.STRING,
        number=308947940,
        optional=True,
    )


class ShieldedInstanceIntegrityPolicy(proto.Message):
    r"""The policy describes the baseline against which Instance boot
    integrity is measured.


    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        update_auto_learn_policy (bool):
            Updates the integrity policy baseline using
            the measurements from the VM instance's most
            recent boot.

            This field is a member of `oneof`_ ``_update_auto_learn_policy``.
    """

    # Single-flag message: setting this re-baselines integrity measurements
    # from the most recent boot (see docstring above).
    update_auto_learn_policy: bool = proto.Field(
        proto.BOOL,
        number=245490215,
        optional=True,
    )


class SignedUrlKey(proto.Message):
    r"""Represents a customer-supplied Signing Key used by Cloud CDN
    Signed URLs


    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        key_name (str):
            Name of the key. The name must be 1-63 characters long, and
            comply with RFC1035. Specifically, the name must be 1-63
            characters long and match the regular expression
            ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first
            character must be a lowercase letter, and all following
            characters must be a dash, lowercase letter, or digit,
            except the last character, which cannot be a dash.

            This field is a member of `oneof`_ ``_key_name``.
        key_value (str):
            128-bit key value used for signing the URL.
            The key value must be a valid RFC 4648 Section 5
            base64url encoded string.

            This field is a member of `oneof`_ ``_key_value``.
+ """ + + key_name: str = proto.Field( + proto.STRING, + number=500938859, + optional=True, + ) + key_value: str = proto.Field( + proto.STRING, + number=504106897, + optional=True, + ) + + +class SimulateMaintenanceEventInstanceRequest(proto.Message): + r"""A request message for Instances.SimulateMaintenanceEvent. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SimulateMaintenanceEventNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.SimulateMaintenanceEvent. + See the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_group (str): + Name of the NodeGroup resource whose nodes + will go under maintenance simulation. + node_groups_simulate_maintenance_event_request_resource (google.cloud.compute_v1.types.NodeGroupsSimulateMaintenanceEventRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + node_groups_simulate_maintenance_event_request_resource: 'NodeGroupsSimulateMaintenanceEventRequest' = proto.Field( + proto.MESSAGE, + number=351468764, + message='NodeGroupsSimulateMaintenanceEventRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class Snapshot(proto.Message): + r"""Represents a Persistent Disk Snapshot resource. You can use + snapshots to back up data on a regular interval. 
For more + information, read Creating persistent disk snapshots. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + architecture (str): + [Output Only] The architecture of the snapshot. Valid values + are ARM64 or X86_64. Check the Architecture enum for the + list of possible values. + + This field is a member of `oneof`_ ``_architecture``. + auto_created (bool): + [Output Only] Set to true if snapshots are automatically + created by applying resource policy on the target disk. + + This field is a member of `oneof`_ ``_auto_created``. + chain_name (str): + Creates the new snapshot in the snapshot + chain labeled with the specified name. The chain + name must be 1-63 characters long and comply + with RFC1035. This is an uncommon option only + for advanced service owners who needs to create + separate snapshot chains, for example, for + chargeback tracking. When you describe your + snapshot resource, this field is visible only if + it has a non-empty value. + + This field is a member of `oneof`_ ``_chain_name``. + creation_size_bytes (int): + [Output Only] Size in bytes of the snapshot at creation + time. + + This field is a member of `oneof`_ ``_creation_size_bytes``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + disk_size_gb (int): + [Output Only] Size of the source disk, specified in GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. + download_bytes (int): + [Output Only] Number of bytes downloaded to restore a + snapshot to a disk. + + This field is a member of `oneof`_ ``_download_bytes``. + id (int): + [Output Only] The unique identifier for the resource. 
This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always compute#snapshot + for Snapshot resources. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this snapshot, which is essentially a hash of + the labels set used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash in order to update + or change labels, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make a get() request to + retrieve a snapshot. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels to apply to this snapshot. These can + be later modified by the setLabels method. Label + values may be empty. + license_codes (MutableSequence[int]): + [Output Only] Integer license codes indicating which + licenses are attached to this snapshot. + licenses (MutableSequence[str]): + [Output Only] A list of public visible licenses that apply + to this snapshot. This can be because the original image had + licenses attached (such as a Windows image). + location_hint (str): + An opaque location hint used to place the + snapshot close to other resources. This field is + for use by internal tools that use the public + API. + + This field is a member of `oneof`_ ``_location_hint``. + name (str): + Name of the resource; provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + satisfies_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + Encrypts the snapshot using a + customer-supplied encryption key. After you + encrypt a snapshot using a customer-supplied + key, you must provide the same key if you use + the snapshot later. For example, you must + provide the encryption key when you create a + disk from the encrypted snapshot in a future + request. Customer-supplied encryption keys do + not protect access to metadata of the snapshot. + If you do not provide an encryption key when + creating the snapshot, then the snapshot will be + encrypted using an automatically generated key + and you do not need to provide a key to use the + snapshot later. + + This field is a member of `oneof`_ ``_snapshot_encryption_key``. + snapshot_type (str): + Indicates the type of the snapshot. + Check the SnapshotType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_snapshot_type``. + source_disk (str): + The source disk used to create this snapshot. + + This field is a member of `oneof`_ ``_source_disk``. + source_disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source disk. Required if the source disk is + protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_disk_encryption_key``. 
+ source_disk_id (str): + [Output Only] The ID value of the disk used to create this + snapshot. This value may be used to determine whether the + snapshot was taken from the current or a previous instance + of a given disk name. + + This field is a member of `oneof`_ ``_source_disk_id``. + source_snapshot_schedule_policy (str): + [Output Only] URL of the resource policy which created this + scheduled snapshot. + + This field is a member of `oneof`_ ``_source_snapshot_schedule_policy``. + source_snapshot_schedule_policy_id (str): + [Output Only] ID of the resource policy which created this + scheduled snapshot. + + This field is a member of `oneof`_ ``_source_snapshot_schedule_policy_id``. + status (str): + [Output Only] The status of the snapshot. This can be + CREATING, DELETING, FAILED, READY, or UPLOADING. Check the + Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + storage_bytes (int): + [Output Only] A size of the storage used by the snapshot. As + snapshots share storage, this number is expected to change + with snapshot creation/deletion. + + This field is a member of `oneof`_ ``_storage_bytes``. + storage_bytes_status (str): + [Output Only] An indicator whether storageBytes is in a + stable state or it is being adjusted as a result of shared + storage reallocation. This status can either be UPDATING, + meaning the size of the snapshot is being updated, or + UP_TO_DATE, meaning the size of the snapshot is up-to-date. + Check the StorageBytesStatus enum for the list of possible + values. + + This field is a member of `oneof`_ ``_storage_bytes_status``. + storage_locations (MutableSequence[str]): + Cloud Storage bucket storage location of the + snapshot (regional or multi-regional). + """ + class Architecture(proto.Enum): + r"""[Output Only] The architecture of the snapshot. Valid values are + ARM64 or X86_64. + + Values: + UNDEFINED_ARCHITECTURE (0): + A value indicating that the enum field is not + set. 
+ ARCHITECTURE_UNSPECIFIED (394750507): + Default value indicating Architecture is not + set. + ARM64 (62547450): + Machines with architecture ARM64 + X86_64 (425300551): + Machines with architecture X86_64 + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + + class SnapshotType(proto.Enum): + r"""Indicates the type of the snapshot. + + Values: + UNDEFINED_SNAPSHOT_TYPE (0): + A value indicating that the enum field is not + set. + ARCHIVE (506752162): + No description available. + STANDARD (484642493): + No description available. + """ + UNDEFINED_SNAPSHOT_TYPE = 0 + ARCHIVE = 506752162 + STANDARD = 484642493 + + class Status(proto.Enum): + r"""[Output Only] The status of the snapshot. This can be CREATING, + DELETING, FAILED, READY, or UPLOADING. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + Snapshot creation is in progress. + DELETING (528602024): + Snapshot is currently being deleted. + FAILED (455706685): + Snapshot creation failed. + READY (77848963): + Snapshot has been created successfully. + UPLOADING (267603489): + Snapshot is being uploaded. + """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + FAILED = 455706685 + READY = 77848963 + UPLOADING = 267603489 + + class StorageBytesStatus(proto.Enum): + r"""[Output Only] An indicator whether storageBytes is in a stable state + or it is being adjusted as a result of shared storage reallocation. + This status can either be UPDATING, meaning the size of the snapshot + is being updated, or UP_TO_DATE, meaning the size of the snapshot is + up-to-date. + + Values: + UNDEFINED_STORAGE_BYTES_STATUS (0): + A value indicating that the enum field is not + set. + UPDATING (494614342): + No description available. + UP_TO_DATE (101306702): + No description available. 
+ """ + UNDEFINED_STORAGE_BYTES_STATUS = 0 + UPDATING = 494614342 + UP_TO_DATE = 101306702 + + architecture: str = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) + auto_created: bool = proto.Field( + proto.BOOL, + number=463922264, + optional=True, + ) + chain_name: str = proto.Field( + proto.STRING, + number=68644169, + optional=True, + ) + creation_size_bytes: int = proto.Field( + proto.INT64, + number=125400077, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=316263735, + optional=True, + ) + download_bytes: int = proto.Field( + proto.INT64, + number=435054068, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + license_codes: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=45482664, + ) + licenses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=337642578, + ) + location_hint: str = proto.Field( + proto.STRING, + number=350519505, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=480964267, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + snapshot_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=43334526, + optional=True, + message='CustomerEncryptionKey', + ) + snapshot_type: str = proto.Field( + proto.STRING, + number=124349653, + optional=True, + 
) + source_disk: str = proto.Field( + proto.STRING, + number=451753793, + optional=True, + ) + source_disk_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=531501153, + optional=True, + message='CustomerEncryptionKey', + ) + source_disk_id: str = proto.Field( + proto.STRING, + number=454190809, + optional=True, + ) + source_snapshot_schedule_policy: str = proto.Field( + proto.STRING, + number=235756291, + optional=True, + ) + source_snapshot_schedule_policy_id: str = proto.Field( + proto.STRING, + number=70489047, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + storage_bytes: int = proto.Field( + proto.INT64, + number=424631719, + optional=True, + ) + storage_bytes_status: str = proto.Field( + proto.STRING, + number=490739082, + optional=True, + ) + storage_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=328005274, + ) + + +class SnapshotList(proto.Message): + r"""Contains a list of Snapshot resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Snapshot]): + A list of Snapshot resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Snapshot'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Snapshot', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SourceDiskEncryptionKey(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): + The customer-supplied encryption key of the + source disk. Required if the source disk is + protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + source_disk (str): + URL of the disk attached to the source + instance. This can be a full or valid partial + URL. For example, the following are valid + values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /disks/disk - + projects/project/zones/zone/disks/disk - + zones/zone/disks/disk + + This field is a member of `oneof`_ ``_source_disk``. 
+ """ + + disk_encryption_key: 'CustomerEncryptionKey' = proto.Field( + proto.MESSAGE, + number=271660677, + optional=True, + message='CustomerEncryptionKey', + ) + source_disk: str = proto.Field( + proto.STRING, + number=451753793, + optional=True, + ) + + +class SourceInstanceParams(proto.Message): + r"""A specification of the parameters to use when creating the + instance template from a source instance. + + Attributes: + disk_configs (MutableSequence[google.cloud.compute_v1.types.DiskInstantiationConfig]): + Attached disks configuration. If not + provided, defaults are applied: For boot disk + and any other R/W disks, the source images for + each disk will be used. For read-only disks, + they will be attached in read-only mode. Local + SSD disks will be created as blank volumes. + """ + + disk_configs: MutableSequence['DiskInstantiationConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=235580623, + message='DiskInstantiationConfig', + ) + + +class SourceInstanceProperties(proto.Message): + r"""DEPRECATED: Please use compute#instanceProperties instead. + New properties will not be added to this field. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + can_ip_forward (bool): + Enables instances created based on this + machine image to send packets with source IP + addresses other than their own and receive + packets with destination IP addresses other than + their own. If these instances will be used as an + IP gateway or it will be set as the next-hop in + a Route resource, specify true. If unsure, leave + this set to false. See the Enable IP forwarding + documentation for more information. + + This field is a member of `oneof`_ ``_can_ip_forward``. + deletion_protection (bool): + Whether the instance created from this + machine image should be protected against + deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. 
+ description (str): + An optional text description for the + instances that are created from this machine + image. + + This field is a member of `oneof`_ ``_description``. + disks (MutableSequence[google.cloud.compute_v1.types.SavedAttachedDisk]): + An array of disks that are associated with + the instances that are created from this machine + image. + guest_accelerators (MutableSequence[google.cloud.compute_v1.types.AcceleratorConfig]): + A list of guest accelerator cards' type and + count to use for instances created from this + machine image. + key_revocation_action_type (str): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + Check the KeyRevocationActionType enum for the + list of possible values. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + labels (MutableMapping[str, str]): + Labels to apply to instances that are created + from this machine image. + machine_type (str): + The machine type to use for instances that + are created from this machine image. + + This field is a member of `oneof`_ ``_machine_type``. + metadata (google.cloud.compute_v1.types.Metadata): + The metadata key/value pairs to assign to + instances that are created from this machine + image. These pairs can consist of custom + metadata or predefined keys. See Project and + instance metadata for more information. + + This field is a member of `oneof`_ ``_metadata``. + min_cpu_platform (str): + Minimum cpu/platform to be used by instances + created from this machine image. The instance + may be scheduled on the specified or newer + cpu/platform. Applicable values are the friendly + names of CPU platforms, such as minCpuPlatform: + "Intel Haswell" or minCpuPlatform: "Intel Sandy + Bridge". For more information, read Specifying a + Minimum CPU Platform. + + This field is a member of `oneof`_ ``_min_cpu_platform``. 
+ network_interfaces (MutableSequence[google.cloud.compute_v1.types.NetworkInterface]): + An array of network access configurations for + this interface. + scheduling (google.cloud.compute_v1.types.Scheduling): + Specifies the scheduling options for the + instances that are created from this machine + image. + + This field is a member of `oneof`_ ``_scheduling``. + service_accounts (MutableSequence[google.cloud.compute_v1.types.ServiceAccount]): + A list of service accounts with specified + scopes. Access tokens for these service accounts + are available to the instances that are created + from this machine image. Use metadata queries to + obtain the access tokens for these instances. + tags (google.cloud.compute_v1.types.Tags): + A list of tags to apply to the instances that + are created from this machine image. The tags + identify valid sources or targets for network + firewalls. The setTags method can modify this + list of tags. Each tag within the list must + comply with RFC1035. + + This field is a member of `oneof`_ ``_tags``. + """ + class KeyRevocationActionType(proto.Enum): + r"""KeyRevocationActionType of the instance. Supported options + are "STOP" and "NONE". The default value is "NONE" if it is not + specified. + + Values: + UNDEFINED_KEY_REVOCATION_ACTION_TYPE (0): + A value indicating that the enum field is not + set. + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED (467110106): + Default value. This value is unused. + NONE (2402104): + Indicates user chose no operation. + STOP (2555906): + Indicates user chose to opt for VM shutdown + on key revocation. 
+ """ + UNDEFINED_KEY_REVOCATION_ACTION_TYPE = 0 + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 467110106 + NONE = 2402104 + STOP = 2555906 + + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=467731324, + optional=True, + ) + deletion_protection: bool = proto.Field( + proto.BOOL, + number=458014698, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + disks: MutableSequence['SavedAttachedDisk'] = proto.RepeatedField( + proto.MESSAGE, + number=95594102, + message='SavedAttachedDisk', + ) + guest_accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=463595119, + message='AcceleratorConfig', + ) + key_revocation_action_type: str = proto.Field( + proto.STRING, + number=235941474, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + machine_type: str = proto.Field( + proto.STRING, + number=227711026, + optional=True, + ) + metadata: 'Metadata' = proto.Field( + proto.MESSAGE, + number=86866735, + optional=True, + message='Metadata', + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=242912759, + optional=True, + ) + network_interfaces: MutableSequence['NetworkInterface'] = proto.RepeatedField( + proto.MESSAGE, + number=52735243, + message='NetworkInterface', + ) + scheduling: 'Scheduling' = proto.Field( + proto.MESSAGE, + number=386688404, + optional=True, + message='Scheduling', + ) + service_accounts: MutableSequence['ServiceAccount'] = proto.RepeatedField( + proto.MESSAGE, + number=277537328, + message='ServiceAccount', + ) + tags: 'Tags' = proto.Field( + proto.MESSAGE, + number=3552281, + optional=True, + message='Tags', + ) + + +class SslCertificate(proto.Message): + r"""Represents an SSL Certificate resource. 
Google Compute Engine has + two SSL Certificate resources: \* + `Global `__ \* + `Regional `__ + The sslCertificates are used by: - external HTTPS load balancers - + SSL proxy load balancers The regionSslCertificates are used by + internal HTTPS load balancers. Optionally, certificate file contents + that you upload can contain a set of up to five PEM-encoded + certificates. The API call creates an object (sslCertificate) that + holds this data. You can use SSL keys and certificates to secure + connections to a load balancer. For more information, read Creating + and using SSL certificates, SSL certificates quotas and limits, and + Troubleshooting SSL certificates. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + certificate (str): + A value read into memory from a certificate + file. The certificate file must be in PEM + format. The certificate chain must be no greater + than 5 certs long. The chain must include at + least one intermediate cert. + + This field is a member of `oneof`_ ``_certificate``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + expire_time (str): + [Output Only] Expire time of the certificate. RFC3339 + + This field is a member of `oneof`_ ``_expire_time``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#sslCertificate for SSL certificates. + + This field is a member of `oneof`_ ``_kind``. 
+ managed (google.cloud.compute_v1.types.SslCertificateManagedSslCertificate): + Configuration and status of a managed SSL + certificate. + + This field is a member of `oneof`_ ``_managed``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + private_key (str): + A value read into memory from a write-only + private key file. The private key file must be + in PEM format. For security, only insert + requests include this field. + + This field is a member of `oneof`_ ``_private_key``. + region (str): + [Output Only] URL of the region where the regional SSL + Certificate resides. This field is not applicable to global + SSL Certificate. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + self_managed (google.cloud.compute_v1.types.SslCertificateSelfManagedSslCertificate): + Configuration and status of a self-managed + SSL certificate. + + This field is a member of `oneof`_ ``_self_managed``. + subject_alternative_names (MutableSequence[str]): + [Output Only] Domains associated with the certificate via + Subject Alternative Name. + type_ (str): + (Optional) Specifies the type of SSL certificate, either + "SELF_MANAGED" or "MANAGED". If not specified, the + certificate is self-managed and the fields certificate and + private_key are used. Check the Type enum for the list of + possible values. + + This field is a member of `oneof`_ ``_type``. 
+ """ + class Type(proto.Enum): + r"""(Optional) Specifies the type of SSL certificate, either + "SELF_MANAGED" or "MANAGED". If not specified, the certificate is + self-managed and the fields certificate and private_key are used. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + MANAGED (479501183): + Google-managed SSLCertificate. + SELF_MANAGED (434437516): + Certificate uploaded by user. + TYPE_UNSPECIFIED (437714322): + No description available. + """ + UNDEFINED_TYPE = 0 + MANAGED = 479501183 + SELF_MANAGED = 434437516 + TYPE_UNSPECIFIED = 437714322 + + certificate: str = proto.Field( + proto.STRING, + number=341787031, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + expire_time: str = proto.Field( + proto.STRING, + number=440691181, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + managed: 'SslCertificateManagedSslCertificate' = proto.Field( + proto.MESSAGE, + number=298389407, + optional=True, + message='SslCertificateManagedSslCertificate', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + private_key: str = proto.Field( + proto.STRING, + number=361331107, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_managed: 'SslCertificateSelfManagedSslCertificate' = proto.Field( + proto.MESSAGE, + number=329284012, + optional=True, + message='SslCertificateSelfManagedSslCertificate', + ) + subject_alternative_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=528807907, + ) + type_: str = proto.Field( + proto.STRING, + 
number=3575610, + optional=True, + ) + + +class SslCertificateAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.SslCertificatesScopedList]): + A list of SslCertificatesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#sslCertificateAggregatedList for lists of SSL + Certificates. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'SslCertificatesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='SslCertificatesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SslCertificateList(proto.Message): + r"""Contains a list of SslCertificate resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.SslCertificate]): + A list of SslCertificate resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. 
+ + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['SslCertificate'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='SslCertificate', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SslCertificateManagedSslCertificate(proto.Message): + r"""Configuration and status of a managed SSL certificate. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + domain_status (MutableMapping[str, str]): + [Output only] Detailed statuses of the domains specified for + managed certificate resource. + domains (MutableSequence[str]): + The domains for which a managed SSL certificate will be + generated. Each Google-managed SSL certificate supports up + to the `maximum number of domains per Google-managed SSL + certificate `__. + status (str): + [Output only] Status of the managed certificate resource. + Check the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + """ + class Status(proto.Enum): + r"""[Output only] Status of the managed certificate resource. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + The certificate management is working, and a + certificate has been provisioned. + MANAGED_CERTIFICATE_STATUS_UNSPECIFIED (474800850): + No description available. + PROVISIONING (290896621): + The certificate management is working. 
GCP + will attempt to provision the first certificate. + PROVISIONING_FAILED (76813775): + Certificate provisioning failed due to an issue with the DNS + or load balancing configuration. For details of which domain + failed, consult domain_status field. + PROVISIONING_FAILED_PERMANENTLY (275036203): + Certificate provisioning failed due to an issue with the DNS + or load balancing configuration. It won't be retried. To try + again delete and create a new managed SslCertificate + resource. For details of which domain failed, consult + domain_status field. + RENEWAL_FAILED (434659076): + Renewal of the certificate has failed due to an issue with + the DNS or load balancing configuration. The existing cert + is still serving; however, it will expire shortly. To + provision a renewed certificate, delete and create a new + managed SslCertificate resource. For details on which domain + failed, consult domain_status field. + """ + UNDEFINED_STATUS = 0 + ACTIVE = 314733318 + MANAGED_CERTIFICATE_STATUS_UNSPECIFIED = 474800850 + PROVISIONING = 290896621 + PROVISIONING_FAILED = 76813775 + PROVISIONING_FAILED_PERMANENTLY = 275036203 + RENEWAL_FAILED = 434659076 + + domain_status: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=360305613, + ) + domains: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=226935855, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + + +class SslCertificateSelfManagedSslCertificate(proto.Message): + r"""Configuration and status of a self-managed SSL certificate. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + certificate (str): + A local certificate file. The certificate + must be in PEM format. The certificate chain + must be no greater than 5 certs long. The chain + must include at least one intermediate cert. + + This field is a member of `oneof`_ ``_certificate``. 
+ private_key (str): + A write-only private key in PEM format. Only + insert requests will include this field. + + This field is a member of `oneof`_ ``_private_key``. + """ + + certificate: str = proto.Field( + proto.STRING, + number=341787031, + optional=True, + ) + private_key: str = proto.Field( + proto.STRING, + number=361331107, + optional=True, + ) + + +class SslCertificatesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ssl_certificates (MutableSequence[google.cloud.compute_v1.types.SslCertificate]): + List of SslCertificates contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + ssl_certificates: MutableSequence['SslCertificate'] = proto.RepeatedField( + proto.MESSAGE, + number=366006543, + message='SslCertificate', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SslPoliciesAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.SslPoliciesScopedList]): + A list of SslPoliciesScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#sslPolicyAggregatedList for lists of SSL Policies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'SslPoliciesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='SslPoliciesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SslPoliciesList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.SslPolicy]): + A list of SslPolicy resources. + kind (str): + [Output Only] Type of the resource. 
Always + compute#sslPoliciesList for lists of sslPolicies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['SslPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='SslPolicy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SslPoliciesListAvailableFeaturesResponse(proto.Message): + r""" + + Attributes: + features (MutableSequence[str]): + + """ + + features: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=246211645, + ) + + +class SslPoliciesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ssl_policies (MutableSequence[google.cloud.compute_v1.types.SslPolicy]): + A list of SslPolicies contained in this + scope. 
+ warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of SSL policies when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + ssl_policies: MutableSequence['SslPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=209941027, + message='SslPolicy', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SslPolicy(proto.Message): + r"""Represents an SSL Policy resource. Use SSL policies to + control the SSL features, such as versions and cipher suites, + offered by an HTTPS or SSL Proxy load balancer. For more + information, read SSL Policy Concepts. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + custom_features (MutableSequence[str]): + A list of features enabled when the selected + profile is CUSTOM. The method returns the set of + features that can be specified in this list. + This field must be empty if the profile is not + CUSTOM. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + enabled_features (MutableSequence[str]): + [Output Only] The list of features enabled in the SSL + policy. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a SslPolicy. An + up-to-date fingerprint must be provided in order + to update the SslPolicy, otherwise the request + will fail with error 412 conditionNotMet. To see + the latest fingerprint, make a get() request to + retrieve an SslPolicy. 
+ + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output only] Type of the resource. Always + compute#sslPolicyfor SSL policies. + + This field is a member of `oneof`_ ``_kind``. + min_tls_version (str): + The minimum version of SSL protocol that can be used by the + clients to establish a connection with the load balancer. + This can be one of TLS_1_0, TLS_1_1, TLS_1_2. Check the + MinTlsVersion enum for the list of possible values. + + This field is a member of `oneof`_ ``_min_tls_version``. + name (str): + Name of the resource. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + profile (str): + Profile specifies the set of SSL features + that can be used by the load balancer when + negotiating SSL with clients. This can be one of + COMPATIBLE, MODERN, RESTRICTED, or CUSTOM. If + using CUSTOM, the set of SSL features to enable + must be specified in the customFeatures field. + Check the Profile enum for the list of possible + values. + + This field is a member of `oneof`_ ``_profile``. + region (str): + [Output Only] URL of the region where the regional SSL + policy resides. This field is not applicable to global SSL + policies. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warnings (MutableSequence[google.cloud.compute_v1.types.Warnings]): + [Output Only] If potential misconfigurations are detected + for this SSL policy, this field will be populated with + warning messages. + """ + class MinTlsVersion(proto.Enum): + r"""The minimum version of SSL protocol that can be used by the clients + to establish a connection with the load balancer. This can be one of + TLS_1_0, TLS_1_1, TLS_1_2. + + Values: + UNDEFINED_MIN_TLS_VERSION (0): + A value indicating that the enum field is not + set. + TLS_1_0 (33116734): + TLS 1.0 + TLS_1_1 (33116735): + TLS 1.1 + TLS_1_2 (33116736): + TLS 1.2 + """ + UNDEFINED_MIN_TLS_VERSION = 0 + TLS_1_0 = 33116734 + TLS_1_1 = 33116735 + TLS_1_2 = 33116736 + + class Profile(proto.Enum): + r"""Profile specifies the set of SSL features that can be used by + the load balancer when negotiating SSL with clients. This can be + one of COMPATIBLE, MODERN, RESTRICTED, or CUSTOM. If using + CUSTOM, the set of SSL features to enable must be specified in + the customFeatures field. + + Values: + UNDEFINED_PROFILE (0): + A value indicating that the enum field is not + set. + COMPATIBLE (179357396): + Compatible profile. Allows the broadset set + of clients, even those which support only + out-of-date SSL features to negotiate with the + load balancer. + CUSTOM (388595569): + Custom profile. Allow only the set of allowed + SSL features specified in the customFeatures + field. + MODERN (132013855): + Modern profile. Supports a wide set of SSL + features, allowing modern clients to negotiate + SSL with the load balancer. + RESTRICTED (261551195): + Restricted profile. Supports a reduced set of + SSL features, intended to meet stricter + compliance requirements. 
+ """ + UNDEFINED_PROFILE = 0 + COMPATIBLE = 179357396 + CUSTOM = 388595569 + MODERN = 132013855 + RESTRICTED = 261551195 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + custom_features: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=34789707, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + enabled_features: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=469017467, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + min_tls_version: str = proto.Field( + proto.STRING, + number=8155943, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + profile: str = proto.Field( + proto.STRING, + number=227445161, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warnings: MutableSequence['Warnings'] = proto.RepeatedField( + proto.MESSAGE, + number=498091095, + message='Warnings', + ) + + +class SslPolicyReference(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ssl_policy (str): + URL of the SSL policy resource. Set this to + empty string to clear any existing SSL policy + associated with the target proxy resource. + + This field is a member of `oneof`_ ``_ssl_policy``. + """ + + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + optional=True, + ) + + +class StartAsyncReplicationDiskRequest(proto.Message): + r"""A request message for Disks.StartAsyncReplication. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The name of the persistent disk. + disks_start_async_replication_request_resource (google.cloud.compute_v1.types.DisksStartAsyncReplicationRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + disks_start_async_replication_request_resource: 'DisksStartAsyncReplicationRequest' = proto.Field( + proto.MESSAGE, + number=470814554, + message='DisksStartAsyncReplicationRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class StartAsyncReplicationRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.StartAsyncReplication. See + the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The name of the persistent disk. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_disks_start_async_replication_request_resource (google.cloud.compute_v1.types.RegionDisksStartAsyncReplicationRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_disks_start_async_replication_request_resource: 'RegionDisksStartAsyncReplicationRequest' = proto.Field( + proto.MESSAGE, + number=474326021, + message='RegionDisksStartAsyncReplicationRequest', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class StartInstanceRequest(proto.Message): + r"""A request message for Instances.Start. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance resource to start. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class StartWithEncryptionKeyInstanceRequest(proto.Message): + r"""A request message for Instances.StartWithEncryptionKey. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance resource to start. + instances_start_with_encryption_key_request_resource (google.cloud.compute_v1.types.InstancesStartWithEncryptionKeyRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instances_start_with_encryption_key_request_resource: 'InstancesStartWithEncryptionKeyRequest' = proto.Field( + proto.MESSAGE, + number=441712511, + message='InstancesStartWithEncryptionKeyRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class StatefulPolicy(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + preserved_state (google.cloud.compute_v1.types.StatefulPolicyPreservedState): + + This field is a member of `oneof`_ ``_preserved_state``. + """ + + preserved_state: 'StatefulPolicyPreservedState' = proto.Field( + proto.MESSAGE, + number=2634026, + optional=True, + message='StatefulPolicyPreservedState', + ) + + +class StatefulPolicyPreservedState(proto.Message): + r"""Configuration of preserved resources. 
+ + Attributes: + disks (MutableMapping[str, google.cloud.compute_v1.types.StatefulPolicyPreservedStateDiskDevice]): + Disks created on the instances that will be + preserved on instance delete, update, etc. This + map is keyed with the device names of the disks. + """ + + disks: MutableMapping[str, 'StatefulPolicyPreservedStateDiskDevice'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=95594102, + message='StatefulPolicyPreservedStateDiskDevice', + ) + + +class StatefulPolicyPreservedStateDiskDevice(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + auto_delete (str): + These stateful disks will never be deleted during + autohealing, update or VM instance recreate operations. This + flag is used to configure if the disk should be deleted + after it is no longer used by the group, e.g. when the given + instance or the whole group is deleted. Note: disks attached + in READ_ONLY mode cannot be auto-deleted. Check the + AutoDelete enum for the list of possible values. + + This field is a member of `oneof`_ ``_auto_delete``. + """ + class AutoDelete(proto.Enum): + r"""These stateful disks will never be deleted during autohealing, + update or VM instance recreate operations. This flag is used to + configure if the disk should be deleted after it is no longer used + by the group, e.g. when the given instance or the whole group is + deleted. Note: disks attached in READ_ONLY mode cannot be + auto-deleted. + + Values: + UNDEFINED_AUTO_DELETE (0): + A value indicating that the enum field is not + set. + NEVER (74175084): + No description available. + ON_PERMANENT_INSTANCE_DELETION (95727719): + No description available. 
+ """ + UNDEFINED_AUTO_DELETE = 0 + NEVER = 74175084 + ON_PERMANENT_INSTANCE_DELETION = 95727719 + + auto_delete: str = proto.Field( + proto.STRING, + number=464761403, + optional=True, + ) + + +class StopAsyncReplicationDiskRequest(proto.Message): + r"""A request message for Disks.StopAsyncReplication. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The name of the persistent disk. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class StopAsyncReplicationRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.StopAsyncReplication. See + the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The name of the persistent disk. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class StopGroupAsyncReplicationDiskRequest(proto.Message): + r"""A request message for Disks.StopGroupAsyncReplication. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disks_stop_group_async_replication_resource_resource (google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. This + must be the zone of the primary or secondary + disks in the consistency group. + """ + + disks_stop_group_async_replication_resource_resource: 'DisksStopGroupAsyncReplicationResource' = proto.Field( + proto.MESSAGE, + number=346815509, + message='DisksStopGroupAsyncReplicationResource', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class StopGroupAsyncReplicationRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.StopGroupAsyncReplication. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disks_stop_group_async_replication_resource_resource (google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource): + The body resource for this request + project (str): + Project ID for this request. + region (str): + The name of the region for this request. This + must be the region of the primary or secondary + disks in the consistency group. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + disks_stop_group_async_replication_resource_resource: 'DisksStopGroupAsyncReplicationResource' = proto.Field( + proto.MESSAGE, + number=346815509, + message='DisksStopGroupAsyncReplicationResource', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class StopInstanceRequest(proto.Message): + r"""A request message for Instances.Stop. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + discard_local_ssd (bool): + If true, discard the contents of any attached + localSSD partitions. Default value is false. + + This field is a member of `oneof`_ ``_discard_local_ssd``. + instance (str): + Name of the instance resource to stop. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + discard_local_ssd: bool = proto.Field( + proto.BOOL, + number=319517903, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class Subnetwork(proto.Message): + r"""Represents a Subnetwork resource. A subnetwork (also known as + a subnet) is a logical partition of a Virtual Private Cloud + network with one primary IP range and zero or more secondary IP + ranges. For more information, read Virtual Private Cloud (VPC) + Network. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. This field can be set only at resource + creation time. + + This field is a member of `oneof`_ ``_description``. + enable_flow_logs (bool): + Whether to enable flow logging for this subnetwork. If this + field is not explicitly set, it will not appear in get + listings. 
If not set the default behavior is determined by + the org policy, if there is no org policy specified, then it + will default to disabled. This field isn't supported if the + subnet purpose field is set to REGIONAL_MANAGED_PROXY. + + This field is a member of `oneof`_ ``_enable_flow_logs``. + external_ipv6_prefix (str): + The external IPv6 address range that is owned + by this subnetwork. + + This field is a member of `oneof`_ ``_external_ipv6_prefix``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a Subnetwork. An + up-to-date fingerprint must be provided in order + to update the Subnetwork, otherwise the request + will fail with error 412 conditionNotMet. To see + the latest fingerprint, make a get() request to + retrieve a Subnetwork. + + This field is a member of `oneof`_ ``_fingerprint``. + gateway_address (str): + [Output Only] The gateway address for default routes to + reach destination addresses outside this subnetwork. + + This field is a member of `oneof`_ ``_gateway_address``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + internal_ipv6_prefix (str): + [Output Only] The internal IPv6 address range that is + assigned to this subnetwork. + + This field is a member of `oneof`_ ``_internal_ipv6_prefix``. + ip_cidr_range (str): + The range of internal addresses that are + owned by this subnetwork. Provide this property + when you create the subnetwork. For example, + 10.0.0.0/8 or 100.64.0.0/10. Ranges must be + unique and non-overlapping within a network. + Only IPv4 is supported. This field is set at + resource creation time. The range can be any + range listed in the Valid ranges list. The range + can be expanded after creation using + expandIpCidrRange. 
+ + This field is a member of `oneof`_ ``_ip_cidr_range``. + ipv6_access_type (str): + The access type of IPv6 address this subnet holds. It's + immutable and can only be specified during creation or the + first time the subnet is updated into IPV4_IPV6 dual stack. + Check the Ipv6AccessType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_ipv6_access_type``. + ipv6_cidr_range (str): + [Output Only] This field is for internal use. + + This field is a member of `oneof`_ ``_ipv6_cidr_range``. + kind (str): + [Output Only] Type of the resource. Always + compute#subnetwork for Subnetwork resources. + + This field is a member of `oneof`_ ``_kind``. + log_config (google.cloud.compute_v1.types.SubnetworkLogConfig): + This field denotes the VPC flow logging + options for this subnetwork. If logging is + enabled, logs are exported to Cloud Logging. + + This field is a member of `oneof`_ ``_log_config``. + name (str): + The name of the resource, provided by the client when + initially creating the resource. The name must be 1-63 + characters long, and comply with RFC1035. Specifically, the + name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network (str): + The URL of the network to which this + subnetwork belongs, provided by the client when + initially creating the subnetwork. This field + can be set only at resource creation time. + + This field is a member of `oneof`_ ``_network``. + private_ip_google_access (bool): + Whether the VMs in this subnet can access + Google services without assigned external IP + addresses. This field can be both set at + resource creation time and updated using + setPrivateIpGoogleAccess. 
+ + This field is a member of `oneof`_ ``_private_ip_google_access``. + private_ipv6_google_access (str): + This field is for internal use. This field + can be both set at resource creation time and + updated using patch. Check the + PrivateIpv6GoogleAccess enum for the list of + possible values. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. + purpose (str): + The purpose of the resource. This field can be either + PRIVATE, REGIONAL_MANAGED_PROXY, PRIVATE_SERVICE_CONNECT, or + INTERNAL_HTTPS_LOAD_BALANCER. PRIVATE is the default purpose + for user-created subnets or subnets that are automatically + created in auto mode networks. A subnet with purpose set to + REGIONAL_MANAGED_PROXY is a user-created subnetwork that is + reserved for regional Envoy-based load balancers. A subnet + with purpose set to PRIVATE_SERVICE_CONNECT is used to + publish services using Private Service Connect. A subnet + with purpose set to INTERNAL_HTTPS_LOAD_BALANCER is a + proxy-only subnet that can be used only by regional internal + HTTP(S) load balancers. Note that REGIONAL_MANAGED_PROXY is + the preferred setting for all regional Envoy load balancers. + If unspecified, the subnet purpose defaults to PRIVATE. The + enableFlowLogs field isn't supported if the subnet purpose + field is set to REGIONAL_MANAGED_PROXY. Check the Purpose + enum for the list of possible values. + + This field is a member of `oneof`_ ``_purpose``. + region (str): + URL of the region where the Subnetwork + resides. This field can be set only at resource + creation time. + + This field is a member of `oneof`_ ``_region``. + role (str): + The role of subnetwork. Currently, this field is only used + when purpose = REGIONAL_MANAGED_PROXY. The value can be set + to ACTIVE or BACKUP. An ACTIVE subnetwork is one that is + currently being used for Envoy-based load balancers in a + region. A BACKUP subnetwork is one that is ready to be + promoted to ACTIVE or is currently draining. 
This field can + be updated with a patch request. Check the Role enum for the + list of possible values. + + This field is a member of `oneof`_ ``_role``. + secondary_ip_ranges (MutableSequence[google.cloud.compute_v1.types.SubnetworkSecondaryRange]): + An array of configurations for secondary IP + ranges for VM instances contained in this + subnetwork. The primary IP of such VM must + belong to the primary ipCidrRange of the + subnetwork. The alias IPs may belong to either + primary or secondary ranges. This field can be + updated with a patch request. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + stack_type (str): + The stack type for the subnet. If set to IPV4_ONLY, new VMs + in the subnet are assigned IPv4 addresses only. If set to + IPV4_IPV6, new VMs in the subnet can be assigned both IPv4 + and IPv6 addresses. If not specified, IPV4_ONLY is used. + This field can be both set at resource creation time and + updated using patch. Check the StackType enum for the list + of possible values. + + This field is a member of `oneof`_ ``_stack_type``. + state (str): + [Output Only] The state of the subnetwork, which can be one + of the following values: READY: Subnetwork is created and + ready to use DRAINING: only applicable to subnetworks that + have the purpose set to INTERNAL_HTTPS_LOAD_BALANCER and + indicates that connections to the load balancer are being + drained. A subnetwork that is draining cannot be used or + modified until it reaches a status of READY Check the State + enum for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + """ + class Ipv6AccessType(proto.Enum): + r"""The access type of IPv6 address this subnet holds. It's immutable + and can only be specified during creation or the first time the + subnet is updated into IPV4_IPV6 dual stack. 
+ + Values: + UNDEFINED_IPV6_ACCESS_TYPE (0): + A value indicating that the enum field is not + set. + EXTERNAL (35607499): + VMs on this subnet will be assigned IPv6 + addresses that are accessible via the Internet, + as well as the VPC network. + INTERNAL (279295677): + VMs on this subnet will be assigned IPv6 + addresses that are only accessible over the VPC + network. + UNSPECIFIED_IPV6_ACCESS_TYPE (313080613): + No description available. + """ + UNDEFINED_IPV6_ACCESS_TYPE = 0 + EXTERNAL = 35607499 + INTERNAL = 279295677 + UNSPECIFIED_IPV6_ACCESS_TYPE = 313080613 + + class PrivateIpv6GoogleAccess(proto.Enum): + r"""This field is for internal use. This field can be both set at + resource creation time and updated using patch. + + Values: + UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS (0): + A value indicating that the enum field is not + set. + DISABLE_GOOGLE_ACCESS (450958579): + Disable private IPv6 access to/from Google + services. + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (427975994): + Bidirectional private IPv6 access to/from + Google services. + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (288210263): + Outbound private IPv6 access from VMs in this + subnet to Google services. + """ + UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS = 0 + DISABLE_GOOGLE_ACCESS = 450958579 + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 427975994 + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 288210263 + + class Purpose(proto.Enum): + r"""The purpose of the resource. This field can be either PRIVATE, + REGIONAL_MANAGED_PROXY, PRIVATE_SERVICE_CONNECT, or + INTERNAL_HTTPS_LOAD_BALANCER. PRIVATE is the default purpose for + user-created subnets or subnets that are automatically created in + auto mode networks. A subnet with purpose set to + REGIONAL_MANAGED_PROXY is a user-created subnetwork that is reserved + for regional Envoy-based load balancers. A subnet with purpose set + to PRIVATE_SERVICE_CONNECT is used to publish services using Private + Service Connect. 
A subnet with purpose set to + INTERNAL_HTTPS_LOAD_BALANCER is a proxy-only subnet that can be used + only by regional internal HTTP(S) load balancers. Note that + REGIONAL_MANAGED_PROXY is the preferred setting for all regional + Envoy load balancers. If unspecified, the subnet purpose defaults to + PRIVATE. The enableFlowLogs field isn't supported if the subnet + purpose field is set to REGIONAL_MANAGED_PROXY. + + Values: + UNDEFINED_PURPOSE (0): + A value indicating that the enum field is not + set. + INTERNAL_HTTPS_LOAD_BALANCER (248748889): + Subnet reserved for Internal HTTP(S) Load + Balancing. + PRIVATE (403485027): + Regular user created or automatically created + subnet. + PRIVATE_RFC_1918 (254902107): + Regular user created or automatically created + subnet. + PRIVATE_SERVICE_CONNECT (48134724): + Subnetworks created for Private Service + Connect in the producer network. + REGIONAL_MANAGED_PROXY (153049966): + Subnetwork used for Regional + Internal/External HTTP(S) Load Balancing. + """ + UNDEFINED_PURPOSE = 0 + INTERNAL_HTTPS_LOAD_BALANCER = 248748889 + PRIVATE = 403485027 + PRIVATE_RFC_1918 = 254902107 + PRIVATE_SERVICE_CONNECT = 48134724 + REGIONAL_MANAGED_PROXY = 153049966 + + class Role(proto.Enum): + r"""The role of subnetwork. Currently, this field is only used when + purpose = REGIONAL_MANAGED_PROXY. The value can be set to ACTIVE or + BACKUP. An ACTIVE subnetwork is one that is currently being used for + Envoy-based load balancers in a region. A BACKUP subnetwork is one + that is ready to be promoted to ACTIVE or is currently draining. + This field can be updated with a patch request. + + Values: + UNDEFINED_ROLE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + The ACTIVE subnet that is currently used. + BACKUP (341010882): + The BACKUP subnet that could be promoted to + ACTIVE. 
+ """ + UNDEFINED_ROLE = 0 + ACTIVE = 314733318 + BACKUP = 341010882 + + class StackType(proto.Enum): + r"""The stack type for the subnet. If set to IPV4_ONLY, new VMs in the + subnet are assigned IPv4 addresses only. If set to IPV4_IPV6, new + VMs in the subnet can be assigned both IPv4 and IPv6 addresses. If + not specified, IPV4_ONLY is used. This field can be both set at + resource creation time and updated using patch. + + Values: + UNDEFINED_STACK_TYPE (0): + A value indicating that the enum field is not + set. + IPV4_IPV6 (22197249): + New VMs in this subnet can have both IPv4 and + IPv6 addresses. + IPV4_ONLY (22373798): + New VMs in this subnet will only be assigned + IPv4 addresses. + UNSPECIFIED_STACK_TYPE (298084569): + No description available. + """ + UNDEFINED_STACK_TYPE = 0 + IPV4_IPV6 = 22197249 + IPV4_ONLY = 22373798 + UNSPECIFIED_STACK_TYPE = 298084569 + + class State(proto.Enum): + r"""[Output Only] The state of the subnetwork, which can be one of the + following values: READY: Subnetwork is created and ready to use + DRAINING: only applicable to subnetworks that have the purpose set + to INTERNAL_HTTPS_LOAD_BALANCER and indicates that connections to + the load balancer are being drained. A subnetwork that is draining + cannot be used or modified until it reaches a status of READY + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + DRAINING (480455402): + Subnetwork is being drained. + READY (77848963): + Subnetwork is ready for use. 
+ """ + UNDEFINED_STATE = 0 + DRAINING = 480455402 + READY = 77848963 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + enable_flow_logs: bool = proto.Field( + proto.BOOL, + number=151544420, + optional=True, + ) + external_ipv6_prefix: str = proto.Field( + proto.STRING, + number=139299190, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + gateway_address: str = proto.Field( + proto.STRING, + number=459867385, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + internal_ipv6_prefix: str = proto.Field( + proto.STRING, + number=506270056, + optional=True, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + ipv6_access_type: str = proto.Field( + proto.STRING, + number=504658653, + optional=True, + ) + ipv6_cidr_range: str = proto.Field( + proto.STRING, + number=273141258, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + log_config: 'SubnetworkLogConfig' = proto.Field( + proto.MESSAGE, + number=351299741, + optional=True, + message='SubnetworkLogConfig', + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + private_ip_google_access: bool = proto.Field( + proto.BOOL, + number=421491790, + optional=True, + ) + private_ipv6_google_access: str = proto.Field( + proto.STRING, + number=48277006, + optional=True, + ) + purpose: str = proto.Field( + proto.STRING, + number=316407070, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + role: str = proto.Field( + proto.STRING, + number=3506294, + optional=True, + ) + secondary_ip_ranges: 
MutableSequence['SubnetworkSecondaryRange'] = proto.RepeatedField( + proto.MESSAGE, + number=136658915, + message='SubnetworkSecondaryRange', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + stack_type: str = proto.Field( + proto.STRING, + number=425908881, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class SubnetworkAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.SubnetworksScopedList]): + A list of SubnetworksScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#subnetworkAggregatedList for aggregated lists of + subnetworks. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'SubnetworksScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='SubnetworksScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SubnetworkList(proto.Message): + r"""Contains a list of Subnetwork resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Subnetwork]): + A list of Subnetwork resources. + kind (str): + [Output Only] Type of resource. Always + compute#subnetworkList for lists of subnetworks. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Subnetwork'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Subnetwork', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SubnetworkLogConfig(proto.Message): + r"""The available logging options for this subnetwork. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + aggregation_interval (str): + Can only be specified if VPC flow logging for + this subnetwork is enabled. Toggles the + aggregation interval for collecting flow logs. + Increasing the interval time will reduce the + amount of generated flow logs for long lasting + connections. Default is an interval of 5 seconds + per connection. Check the AggregationInterval + enum for the list of possible values. + + This field is a member of `oneof`_ ``_aggregation_interval``. + enable (bool): + Whether to enable flow logging for this subnetwork. If this + field is not explicitly set, it will not appear in get + listings. If not set the default behavior is determined by + the org policy, if there is no org policy specified, then it + will default to disabled. Flow logging isn't supported if + the subnet purpose field is set to REGIONAL_MANAGED_PROXY. + + This field is a member of `oneof`_ ``_enable``. 
+ filter_expr (str): + Can only be specified if VPC flow logs for + this subnetwork is enabled. The filter + expression is used to define which VPC flow logs + should be exported to Cloud Logging. + + This field is a member of `oneof`_ ``_filter_expr``. + flow_sampling (float): + Can only be specified if VPC flow logging for this + subnetwork is enabled. The value of the field must be in [0, + 1]. Set the sampling rate of VPC flow logs within the + subnetwork where 1.0 means all collected logs are reported + and 0.0 means no logs are reported. Default is 0.5 unless + otherwise specified by the org policy, which means half of + all collected logs are reported. + + This field is a member of `oneof`_ ``_flow_sampling``. + metadata (str): + Can only be specified if VPC flow logs for this subnetwork + is enabled. Configures whether all, none or a subset of + metadata fields should be added to the reported VPC flow + logs. Default is EXCLUDE_ALL_METADATA. Check the Metadata + enum for the list of possible values. + + This field is a member of `oneof`_ ``_metadata``. + metadata_fields (MutableSequence[str]): + Can only be specified if VPC flow logs for this subnetwork + is enabled and "metadata" was set to CUSTOM_METADATA. + """ + class AggregationInterval(proto.Enum): + r"""Can only be specified if VPC flow logging for this subnetwork + is enabled. Toggles the aggregation interval for collecting flow + logs. Increasing the interval time will reduce the amount of + generated flow logs for long lasting connections. Default is an + interval of 5 seconds per connection. + + Values: + UNDEFINED_AGGREGATION_INTERVAL (0): + A value indicating that the enum field is not + set. + INTERVAL_10_MIN (487155916): + No description available. + INTERVAL_15_MIN (491773521): + No description available. + INTERVAL_1_MIN (69052714): + No description available. + INTERVAL_30_SEC (7548937): + No description available. + INTERVAL_5_MIN (72746798): + No description available. 
+ INTERVAL_5_SEC (72752429): + No description available. + """ + UNDEFINED_AGGREGATION_INTERVAL = 0 + INTERVAL_10_MIN = 487155916 + INTERVAL_15_MIN = 491773521 + INTERVAL_1_MIN = 69052714 + INTERVAL_30_SEC = 7548937 + INTERVAL_5_MIN = 72746798 + INTERVAL_5_SEC = 72752429 + + class Metadata(proto.Enum): + r"""Can only be specified if VPC flow logs for this subnetwork is + enabled. Configures whether all, none or a subset of metadata fields + should be added to the reported VPC flow logs. Default is + EXCLUDE_ALL_METADATA. + + Values: + UNDEFINED_METADATA (0): + A value indicating that the enum field is not + set. + CUSTOM_METADATA (62450749): + No description available. + EXCLUDE_ALL_METADATA (334519954): + No description available. + INCLUDE_ALL_METADATA (164619908): + No description available. + """ + UNDEFINED_METADATA = 0 + CUSTOM_METADATA = 62450749 + EXCLUDE_ALL_METADATA = 334519954 + INCLUDE_ALL_METADATA = 164619908 + + aggregation_interval: str = proto.Field( + proto.STRING, + number=174919042, + optional=True, + ) + enable: bool = proto.Field( + proto.BOOL, + number=311764355, + optional=True, + ) + filter_expr: str = proto.Field( + proto.STRING, + number=183374428, + optional=True, + ) + flow_sampling: float = proto.Field( + proto.FLOAT, + number=530150360, + optional=True, + ) + metadata: str = proto.Field( + proto.STRING, + number=86866735, + optional=True, + ) + metadata_fields: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=378461641, + ) + + +class SubnetworkSecondaryRange(proto.Message): + r"""Represents a secondary IP range of a subnetwork. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_cidr_range (str): + The range of IP addresses belonging to this + subnetwork secondary range. Provide this + property when you create the subnetwork. Ranges + must be unique and non-overlapping with all + primary and secondary IP ranges within a + network. 
Only IPv4 is supported. The range can + be any range listed in the Valid ranges list. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + range_name (str): + The name associated with this subnetwork + secondary range, used when adding an alias IP + range to a VM instance. The name must be 1-63 + characters long, and comply with RFC1035. The + name must be unique within the subnetwork. + + This field is a member of `oneof`_ ``_range_name``. + """ + + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + range_name: str = proto.Field( + proto.STRING, + number=332216397, + optional=True, + ) + + +class SubnetworksExpandIpCidrRangeRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_cidr_range (str): + The IP (in CIDR format or netmask) of + internal addresses that are legal on this + Subnetwork. This range should be disjoint from + other subnetworks within this network. This + range can only be larger than (i.e. a superset + of) the range previously defined before the + update. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + """ + + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + + +class SubnetworksScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + subnetworks (MutableSequence[google.cloud.compute_v1.types.Subnetwork]): + A list of subnetworks contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + An informational warning that appears when + the list of addresses is empty. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + subnetworks: MutableSequence['Subnetwork'] = proto.RepeatedField( + proto.MESSAGE, + number=415853125, + message='Subnetwork', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class SubnetworksSetPrivateIpGoogleAccessRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + private_ip_google_access (bool): + + This field is a member of `oneof`_ ``_private_ip_google_access``. + """ + + private_ip_google_access: bool = proto.Field( + proto.BOOL, + number=421491790, + optional=True, + ) + + +class Subsetting(proto.Message): + r"""Subsetting configuration for this BackendService. Currently + this is applicable only for Internal TCP/UDP load balancing, + Internal HTTP(S) load balancing and Traffic Director. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + policy (str): + Check the Policy enum for the list of + possible values. + + This field is a member of `oneof`_ ``_policy``. + """ + class Policy(proto.Enum): + r""" + + Values: + UNDEFINED_POLICY (0): + A value indicating that the enum field is not + set. + CONSISTENT_HASH_SUBSETTING (108989492): + Subsetting based on consistent hashing. For Traffic + Director, the number of backends per backend group (the + subset size) is based on the ``subset_size`` parameter. For + Internal HTTP(S) load balancing, the number of backends per + backend group (the subset size) is dynamically adjusted in + two cases: - As the number of proxy instances participating + in Internal HTTP(S) load balancing increases, the subset + size decreases. - When the total number of backends in a + network exceeds the capacity of a single proxy instance, + subset sizes are reduced automatically for each service that + has backend subsetting enabled. + NONE (2402104): + No Subsetting. 
Clients may open connections + and send traffic to all backends of this backend + service. This can lead to performance issues if + there is substantial imbalance in the count of + clients and backends. + """ + UNDEFINED_POLICY = 0 + CONSISTENT_HASH_SUBSETTING = 108989492 + NONE = 2402104 + + policy: str = proto.Field( + proto.STRING, + number=91071794, + optional=True, + ) + + +class SuspendInstanceRequest(proto.Message): + r"""A request message for Instances.Suspend. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + discard_local_ssd (bool): + If true, discard the contents of any attached + localSSD partitions. Default value is false. + + This field is a member of `oneof`_ ``_discard_local_ssd``. + instance (str): + Name of the instance resource to suspend. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + discard_local_ssd: bool = proto.Field( + proto.BOOL, + number=319517903, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SwitchToCustomModeNetworkRequest(proto.Message): + r"""A request message for Networks.SwitchToCustomMode. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Name of the network to be updated. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class TCPHealthCheck(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + port (int): + The TCP port number to which the health check + prober sends packets. The default value is 80. + Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. + port_name (str): + Not supported. + + This field is a member of `oneof`_ ``_port_name``. + port_specification (str): + Specifies how a port is selected for health checking. Can be + one of the following values: USE_FIXED_PORT: Specifies a + port number explicitly using the port field in the health + check. Supported by backend services for pass-through load + balancers and backend services for proxy load balancers. Not + supported by target pools. The health check supports all + backends supported by the backend service provided the + backend can be health checked. For example, GCE_VM_IP + network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not + supported. USE_SERVING_PORT: Provides an indirect method of + specifying the health check port by referring to the backend + service. Only supported by backend services for proxy load + balancers. Not supported by target pools. Not supported by + backend services for pass-through load balancers. Supports + all backends that can be health checked; for example, + GCE_VM_IP_PORT network endpoint groups and instance group + backends. For GCE_VM_IP_PORT network endpoint group + backends, the health check uses the port number specified + for each endpoint in the network endpoint group. For + instance group backends, the health check uses the port + number determined by looking up the backend service's named + port in the instance group's list of named ports. Check the + PortSpecification enum for the list of possible values. + + This field is a member of `oneof`_ ``_port_specification``. 
+ proxy_header (str): + Specifies the type of proxy header to append before sending + data to the backend, either NONE or PROXY_V1. The default is + NONE. Check the ProxyHeader enum for the list of possible + values. + + This field is a member of `oneof`_ ``_proxy_header``. + request (str): + Instructs the health check prober to send + this exact ASCII string, up to 1024 bytes in + length, after establishing the TCP connection. + + This field is a member of `oneof`_ ``_request``. + response (str): + Creates a content-based TCP health check. In + addition to establishing a TCP connection, you + can configure the health check to pass only when + the backend sends this exact response ASCII + string, up to 1024 bytes in length. For details, + see: + https://cloud.google.com/load-balancing/docs/health-check-concepts#criteria-protocol-ssl-tcp + + This field is a member of `oneof`_ ``_response``. + """ + class PortSpecification(proto.Enum): + r"""Specifies how a port is selected for health checking. Can be one of + the following values: USE_FIXED_PORT: Specifies a port number + explicitly using the port field in the health check. Supported by + backend services for pass-through load balancers and backend + services for proxy load balancers. Not supported by target pools. + The health check supports all backends supported by the backend + service provided the backend can be health checked. For example, + GCE_VM_IP network endpoint groups, GCE_VM_IP_PORT network endpoint + groups, and instance group backends. USE_NAMED_PORT: Not supported. + USE_SERVING_PORT: Provides an indirect method of specifying the + health check port by referring to the backend service. Only + supported by backend services for proxy load balancers. Not + supported by target pools. Not supported by backend services for + pass-through load balancers. Supports all backends that can be + health checked; for example, GCE_VM_IP_PORT network endpoint groups + and instance group backends. 
For GCE_VM_IP_PORT network endpoint + group backends, the health check uses the port number specified for + each endpoint in the network endpoint group. For instance group + backends, the health check uses the port number determined by + looking up the backend service's named port in the instance group's + list of named ports. + + Values: + UNDEFINED_PORT_SPECIFICATION (0): + A value indicating that the enum field is not + set. + USE_FIXED_PORT (190235748): + The port number in the health check's port is + used for health checking. Applies to network + endpoint group and instance group backends. + USE_NAMED_PORT (349300671): + Not supported. + USE_SERVING_PORT (362637516): + For network endpoint group backends, the + health check uses the port number specified on + each endpoint in the network endpoint group. For + instance group backends, the health check uses + the port number specified for the backend + service's named port defined in the instance + group's named ports. + """ + UNDEFINED_PORT_SPECIFICATION = 0 + USE_FIXED_PORT = 190235748 + USE_NAMED_PORT = 349300671 + USE_SERVING_PORT = 362637516 + + class ProxyHeader(proto.Enum): + r"""Specifies the type of proxy header to append before sending data to + the backend, either NONE or PROXY_V1. The default is NONE. + + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. 
+ """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + port: int = proto.Field( + proto.INT32, + number=3446913, + optional=True, + ) + port_name: str = proto.Field( + proto.STRING, + number=41534345, + optional=True, + ) + port_specification: str = proto.Field( + proto.STRING, + number=51590597, + optional=True, + ) + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + request: str = proto.Field( + proto.STRING, + number=21951119, + optional=True, + ) + response: str = proto.Field( + proto.STRING, + number=196547649, + optional=True, + ) + + +class Tags(proto.Message): + r"""A set of instance tags. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fingerprint (str): + Specifies a fingerprint for this request, + which is essentially a hash of the tags' + contents and used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update tags. You must always provide an + up-to-date fingerprint hash in order to update + or change tags. To see the latest fingerprint, + make get() request to the instance. + + This field is a member of `oneof`_ ``_fingerprint``. + items (MutableSequence[str]): + An array of tags. Each tag must be 1-63 + characters long, and comply with RFC1035. + """ + + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + items: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=100526016, + ) + + +class TargetGrpcProxy(proto.Message): + r"""Represents a Target gRPC Proxy resource. A target gRPC proxy is a + component of load balancers intended for load balancing gRPC + traffic. Only global forwarding rules with load balancing scheme + INTERNAL_SELF_MANAGED can reference a target gRPC proxy. 
The target + gRPC Proxy references a URL map that specifies how traffic is routed + to gRPC backend services. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a TargetGrpcProxy. An + up-to-date fingerprint must be provided in order + to patch/update the TargetGrpcProxy; otherwise, + the request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve the + TargetGrpcProxy. + + This field is a member of `oneof`_ ``_fingerprint``. + id (int): + [Output Only] The unique identifier for the resource type. + The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#targetGrpcProxy for target grpc proxies. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + self_link (str): + [Output Only] Server-defined URL for the resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + [Output Only] Server-defined URL with id for the resource. + + This field is a member of `oneof`_ ``_self_link_with_id``. + url_map (str): + URL to the UrlMap resource that defines the + mapping from URL to the BackendService. The + protocol field in the BackendService must be set + to GRPC. + + This field is a member of `oneof`_ ``_url_map``. + validate_for_proxyless (bool): + If true, indicates that the BackendServices + referenced by the urlMap may be accessed by gRPC + applications without using a sidecar proxy. This + will enable configuration checks on urlMap and + its referenced BackendServices to not allow + unsupported features. A gRPC application must + use "xds:///" scheme in the target URI of the + service it is connecting to. If false, indicates + that the BackendServices referenced by the + urlMap will be accessed by gRPC applications via + a sidecar proxy. In this case, a gRPC + application must not use "xds:///" scheme in the + target URI of the service it is connecting to + + This field is a member of `oneof`_ ``_validate_for_proxyless``. 
+ """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + optional=True, + ) + validate_for_proxyless: bool = proto.Field( + proto.BOOL, + number=101822888, + optional=True, + ) + + +class TargetGrpcProxyList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.TargetGrpcProxy]): + A list of TargetGrpcProxy resources. + kind (str): + [Output Only] Type of the resource. Always + compute#targetGrpcProxy for target grpc proxies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['TargetGrpcProxy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='TargetGrpcProxy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetHttpProxiesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_http_proxies (MutableSequence[google.cloud.compute_v1.types.TargetHttpProxy]): + A list of TargetHttpProxies contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + target_http_proxies: MutableSequence['TargetHttpProxy'] = proto.RepeatedField( + proto.MESSAGE, + number=162147011, + message='TargetHttpProxy', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetHttpProxy(proto.Message): + r"""Represents a Target HTTP Proxy resource. Google Compute Engine has + two Target HTTP Proxy resources: \* + `Global `__ \* + `Regional `__ + A target HTTP proxy is a component of GCP HTTP load balancers. 
\* + targetHttpProxies are used by external HTTP load balancers and + Traffic Director. \* regionTargetHttpProxies are used by internal + HTTP load balancers. Forwarding rules reference a target HTTP proxy, + and the target proxy then references a URL map. For more + information, read Using Target Proxies and Forwarding rule concepts. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a TargetHttpProxy. An + up-to-date fingerprint must be provided in order + to patch/update the TargetHttpProxy; otherwise, + the request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve the + TargetHttpProxy. + + This field is a member of `oneof`_ ``_fingerprint``. + http_keep_alive_timeout_sec (int): + Specifies how long to keep a connection open, + after completing a response, while there is no + matching traffic (in seconds). If an HTTP + keep-alive is not specified, a default value + (610 seconds) will be used. For Global external + HTTP(S) load balancer, the minimum allowed value + is 5 seconds and the maximum allowed value is + 1200 seconds. For Global external HTTP(S) load + balancer (classic), this option is not available + publicly. + + This field is a member of `oneof`_ ``_http_keep_alive_timeout_sec``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. 
+ + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of resource. Always + compute#targetHttpProxy for target HTTP proxies. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + proxy_bind (bool): + This field only applies when the forwarding rule that + references this target proxy has a loadBalancingScheme set + to INTERNAL_SELF_MANAGED. When this field is set to true, + Envoy proxies set up inbound traffic interception and bind + to the IP address and port specified in the forwarding rule. + This is generally useful when using Traffic Director to + configure Envoy as a gateway or middle proxy (in other + words, not a sidecar proxy). The Envoy proxy listens for + inbound requests and handles requests when it receives them. + The default is false. + + This field is a member of `oneof`_ ``_proxy_bind``. + region (str): + [Output Only] URL of the region where the regional Target + HTTP Proxy resides. This field is not applicable to global + Target HTTP Proxies. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + url_map (str): + URL to the UrlMap resource that defines the + mapping from URL to the BackendService. + + This field is a member of `oneof`_ ``_url_map``. 
+ """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + http_keep_alive_timeout_sec: int = proto.Field( + proto.INT32, + number=447326046, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + proxy_bind: bool = proto.Field( + proto.BOOL, + number=286025582, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + optional=True, + ) + + +class TargetHttpProxyAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.TargetHttpProxiesScopedList]): + A list of TargetHttpProxiesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#targetHttpProxyAggregatedList for lists of Target + HTTP Proxies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. 
+ Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'TargetHttpProxiesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='TargetHttpProxiesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + + +class TargetHttpProxyList(proto.Message): + r"""A list of TargetHttpProxy resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.TargetHttpProxy]): + A list of TargetHttpProxy resources. + kind (str): + Type of resource. Always + compute#targetHttpProxyList for lists of target + HTTP proxies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. 
+ Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['TargetHttpProxy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='TargetHttpProxy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetHttpsProxiesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_https_proxies (MutableSequence[google.cloud.compute_v1.types.TargetHttpsProxy]): + A list of TargetHttpsProxies contained in + this scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + target_https_proxies: MutableSequence['TargetHttpsProxy'] = proto.RepeatedField( + proto.MESSAGE, + number=366607882, + message='TargetHttpsProxy', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetHttpsProxiesSetCertificateMapRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + certificate_map (str): + URL of the Certificate Map to associate with + this TargetHttpsProxy. Accepted format is + //certificatemanager.googleapis.com/projects/{project + }/locations/{location}/certificateMaps/{resourceName}. + + This field is a member of `oneof`_ ``_certificate_map``. + """ + + certificate_map: str = proto.Field( + proto.STRING, + number=156463796, + optional=True, + ) + + +class TargetHttpsProxiesSetQuicOverrideRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + quic_override (str): + QUIC policy for the TargetHttpsProxy + resource. Check the QuicOverride enum for the + list of possible values. + + This field is a member of `oneof`_ ``_quic_override``. + """ + class QuicOverride(proto.Enum): + r"""QUIC policy for the TargetHttpsProxy resource. + + Values: + UNDEFINED_QUIC_OVERRIDE (0): + A value indicating that the enum field is not + set. + DISABLE (241807048): + The load balancer will not attempt to + negotiate QUIC with clients. + ENABLE (438835587): + The load balancer will attempt to negotiate + QUIC with clients. + NONE (2402104): + No overrides to the default QUIC policy. This + option is implicit if no QUIC override has been + specified in the request. + """ + UNDEFINED_QUIC_OVERRIDE = 0 + DISABLE = 241807048 + ENABLE = 438835587 + NONE = 2402104 + + quic_override: str = proto.Field( + proto.STRING, + number=456577197, + optional=True, + ) + + +class TargetHttpsProxiesSetSslCertificatesRequest(proto.Message): + r""" + + Attributes: + ssl_certificates (MutableSequence[str]): + New set of SslCertificate resources to + associate with this TargetHttpsProxy resource. + At least one SSL certificate must be specified. + Currently, you may specify up to 15 SSL + certificates. 
+ """ + + ssl_certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=366006543, + ) + + +class TargetHttpsProxy(proto.Message): + r"""Represents a Target HTTPS Proxy resource. Google Compute Engine has + two Target HTTPS Proxy resources: \* + `Global `__ \* + `Regional `__ + A target HTTPS proxy is a component of GCP HTTPS load balancers. \* + targetHttpsProxies are used by external HTTPS load balancers. \* + regionTargetHttpsProxies are used by internal HTTPS load balancers. + Forwarding rules reference a target HTTPS proxy, and the target + proxy then references a URL map. For more information, read Using + Target Proxies and Forwarding rule concepts. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + authorization_policy (str): + Optional. A URL referring to a + networksecurity.AuthorizationPolicy resource that describes + how the proxy should authorize inbound traffic. If left + blank, access will not be restricted by an authorization + policy. Refer to the AuthorizationPolicy resource for + additional details. authorizationPolicy only applies to a + global TargetHttpsProxy attached to globalForwardingRules + with the loadBalancingScheme set to INTERNAL_SELF_MANAGED. + Note: This field currently has no impact. + + This field is a member of `oneof`_ ``_authorization_policy``. + certificate_map (str): + URL of a certificate map that identifies a + certificate map associated with the given target + proxy. This field can only be set for global + target proxies. If set, sslCertificates will be + ignored. Accepted format is + //certificatemanager.googleapis.com/projects/{project + }/locations/{location}/certificateMaps/{resourceName}. + + This field is a member of `oneof`_ ``_certificate_map``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. 
+ description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a TargetHttpsProxy. An + up-to-date fingerprint must be provided in order + to patch the TargetHttpsProxy; otherwise, the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve the + TargetHttpsProxy. + + This field is a member of `oneof`_ ``_fingerprint``. + http_keep_alive_timeout_sec (int): + Specifies how long to keep a connection open, + after completing a response, while there is no + matching traffic (in seconds). If an HTTP + keep-alive is not specified, a default value + (610 seconds) will be used. For Global external + HTTP(S) load balancer, the minimum allowed value + is 5 seconds and the maximum allowed value is + 1200 seconds. For Global external HTTP(S) load + balancer (classic), this option is not available + publicly. + + This field is a member of `oneof`_ ``_http_keep_alive_timeout_sec``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of resource. Always + compute#targetHttpsProxy for target HTTPS proxies. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + proxy_bind (bool): + This field only applies when the forwarding rule that + references this target proxy has a loadBalancingScheme set + to INTERNAL_SELF_MANAGED. When this field is set to true, + Envoy proxies set up inbound traffic interception and bind + to the IP address and port specified in the forwarding rule. + This is generally useful when using Traffic Director to + configure Envoy as a gateway or middle proxy (in other + words, not a sidecar proxy). The Envoy proxy listens for + inbound requests and handles requests when it receives them. + The default is false. + + This field is a member of `oneof`_ ``_proxy_bind``. + quic_override (str): + Specifies the QUIC override policy for this + TargetHttpsProxy resource. This setting + determines whether the load balancer attempts to + negotiate QUIC with clients. You can specify + NONE, ENABLE, or DISABLE. - When quic-override + is set to NONE, Google manages whether QUIC is + used. - When quic-override is set to ENABLE, the + load balancer uses QUIC when possible. - When + quic-override is set to DISABLE, the load + balancer doesn't use QUIC. - If the + quic-override flag is not specified, NONE is + implied. Check the QuicOverride enum for the + list of possible values. + + This field is a member of `oneof`_ ``_quic_override``. + region (str): + [Output Only] URL of the region where the regional + TargetHttpsProxy resides. This field is not applicable to + global TargetHttpsProxies. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + server_tls_policy (str): + Optional. A URL referring to a + networksecurity.ServerTlsPolicy resource that describes how + the proxy should authenticate inbound traffic. + serverTlsPolicy only applies to a global TargetHttpsProxy + attached to globalForwardingRules with the + loadBalancingScheme set to INTERNAL_SELF_MANAGED or EXTERNAL + or EXTERNAL_MANAGED. For details which ServerTlsPolicy + resources are accepted with INTERNAL_SELF_MANAGED and which + with EXTERNAL, EXTERNAL_MANAGED loadBalancingScheme consult + ServerTlsPolicy documentation. If left blank, communications + are not encrypted. + + This field is a member of `oneof`_ ``_server_tls_policy``. + ssl_certificates (MutableSequence[str]): + URLs to SslCertificate resources that are used to + authenticate connections between users and the load + balancer. At least one SSL certificate must be specified. + Currently, you may specify up to 15 SSL certificates. + sslCertificates do not apply when the load balancing scheme + is set to INTERNAL_SELF_MANAGED. + ssl_policy (str): + URL of SslPolicy resource that will be + associated with the TargetHttpsProxy resource. + If not set, the TargetHttpsProxy resource has no + SSL policy configured. + + This field is a member of `oneof`_ ``_ssl_policy``. + url_map (str): + A fully-qualified or valid partial URL to the + UrlMap resource that defines the mapping from + URL to the BackendService. For example, the + following are all valid URLs for specifying a + URL map: - + https://www.googleapis.compute/v1/projects/project/global/urlMaps/ + url-map - + projects/project/global/urlMaps/url-map - + global/urlMaps/url-map + + This field is a member of `oneof`_ ``_url_map``. + """ + class QuicOverride(proto.Enum): + r"""Specifies the QUIC override policy for this TargetHttpsProxy + resource. This setting determines whether the load balancer + attempts to negotiate QUIC with clients. 
You can specify NONE, + ENABLE, or DISABLE. - When quic-override is set to NONE, Google + manages whether QUIC is used. - When quic-override is set to + ENABLE, the load balancer uses QUIC when possible. - When + quic-override is set to DISABLE, the load balancer doesn't use + QUIC. - If the quic-override flag is not specified, NONE is + implied. + + Values: + UNDEFINED_QUIC_OVERRIDE (0): + A value indicating that the enum field is not + set. + DISABLE (241807048): + The load balancer will not attempt to + negotiate QUIC with clients. + ENABLE (438835587): + The load balancer will attempt to negotiate + QUIC with clients. + NONE (2402104): + No overrides to the default QUIC policy. This + option is implicit if no QUIC override has been + specified in the request. + """ + UNDEFINED_QUIC_OVERRIDE = 0 + DISABLE = 241807048 + ENABLE = 438835587 + NONE = 2402104 + + authorization_policy: str = proto.Field( + proto.STRING, + number=33945528, + optional=True, + ) + certificate_map: str = proto.Field( + proto.STRING, + number=156463796, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + http_keep_alive_timeout_sec: int = proto.Field( + proto.INT32, + number=447326046, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + proxy_bind: bool = proto.Field( + proto.BOOL, + number=286025582, + optional=True, + ) + quic_override: str = proto.Field( + proto.STRING, + number=456577197, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + 
proto.STRING, + number=456214797, + optional=True, + ) + server_tls_policy: str = proto.Field( + proto.STRING, + number=295825266, + optional=True, + ) + ssl_certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=366006543, + ) + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + optional=True, + ) + + +class TargetHttpsProxyAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.TargetHttpsProxiesScopedList]): + A list of TargetHttpsProxiesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#targetHttpsProxyAggregatedList for lists of Target + HTTP Proxies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'TargetHttpsProxiesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='TargetHttpsProxiesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetHttpsProxyList(proto.Message): + r"""Contains a list of TargetHttpsProxy resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.TargetHttpsProxy]): + A list of TargetHttpsProxy resources. + kind (str): + Type of resource. Always + compute#targetHttpsProxyList for lists of target + HTTPS proxies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['TargetHttpsProxy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='TargetHttpsProxy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetInstance(proto.Message): + r"""Represents a Target Instance resource. You can use a target + instance to handle traffic for one or more forwarding rules, + which is ideal for forwarding protocol traffic that is managed + by a single source. For example, ESP, AH, TCP, or UDP. For more + information, read Target instances. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + instance (str): + A URL to the virtual machine instance that + handles traffic for this target instance. 
When + creating a target instance, you can provide the + fully-qualified URL or a valid partial URL to + the desired virtual machine. For example, the + following are all valid URLs: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /instances/instance - + projects/project/zones/zone/instances/instance - + zones/zone/instances/instance + + This field is a member of `oneof`_ ``_instance``. + kind (str): + [Output Only] The type of the resource. Always + compute#targetInstance for target instances. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + nat_policy (str): + Must have a value of NO_NAT. Protocol forwarding delivers + packets while preserving the destination IP address of the + forwarding rule referencing the target instance. Check the + NatPolicy enum for the list of possible values. + + This field is a member of `oneof`_ ``_nat_policy``. + network (str): + The URL of the network this target instance + uses to forward traffic. If not specified, the + traffic will be forwarded to the network that + the default network interface belongs to. + + This field is a member of `oneof`_ ``_network``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + zone (str): + [Output Only] URL of the zone where the target instance + resides. You must specify this field as part of the HTTP + request URL. It is not settable as a field in the request + body. 
+ + This field is a member of `oneof`_ ``_zone``. + """ + class NatPolicy(proto.Enum): + r"""Must have a value of NO_NAT. Protocol forwarding delivers packets + while preserving the destination IP address of the forwarding rule + referencing the target instance. + + Values: + UNDEFINED_NAT_POLICY (0): + A value indicating that the enum field is not + set. + NO_NAT (161455491): + No NAT performed. + """ + UNDEFINED_NAT_POLICY = 0 + NO_NAT = 161455491 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + nat_policy: str = proto.Field( + proto.STRING, + number=509780496, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class TargetInstanceAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.TargetInstancesScopedList]): + A list of TargetInstance resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'TargetInstancesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='TargetInstancesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetInstanceList(proto.Message): + r"""Contains a list of TargetInstance resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.TargetInstance]): + A list of TargetInstance resources. + kind (str): + Type of resource. 
+ + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['TargetInstance'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='TargetInstance', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetInstancesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_instances (MutableSequence[google.cloud.compute_v1.types.TargetInstance]): + A list of target instances contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + target_instances: MutableSequence['TargetInstance'] = proto.RepeatedField( + proto.MESSAGE, + number=392915280, + message='TargetInstance', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetPool(proto.Message): + r"""Represents a Target Pool resource. Target pools are used for + network TCP/UDP load balancing. A target pool references member + instances, an associated legacy HttpHealthCheck resource, and, + optionally, a backup target pool. For more information, read + Using target pools. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_pool (str): + The server-defined URL for the resource. This field is + applicable only when the containing target pool is serving a + forwarding rule as the primary pool, and its failoverRatio + field is properly set to a value between [0, 1]. backupPool + and failoverRatio together define the fallback behavior of + the primary target pool: if the ratio of the healthy + instances in the primary pool is at or below failoverRatio, + traffic arriving at the load-balanced IP will be directed to + the backup pool. In case where failoverRatio and backupPool + are not set, or all the instances in the backup pool are + unhealthy, the traffic will be directed back to the primary + pool in the "force" mode, where traffic will be spread to + the healthy instances with the best effort, or to all + instances when no instance is healthy. + + This field is a member of `oneof`_ ``_backup_pool``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. 
+ failover_ratio (float): + This field is applicable only when the containing target + pool is serving a forwarding rule as the primary pool (i.e., + not as a backup pool to some other target pool). The value + of the field must be in [0, 1]. If set, backupPool must also + be set. They together define the fallback behavior of the + primary target pool: if the ratio of the healthy instances + in the primary pool is at or below this number, traffic + arriving at the load-balanced IP will be directed to the + backup pool. In case where failoverRatio is not set or all + the instances in the backup pool are unhealthy, the traffic + will be directed back to the primary pool in the "force" + mode, where traffic will be spread to the healthy instances + with the best effort, or to all instances when no instance + is healthy. + + This field is a member of `oneof`_ ``_failover_ratio``. + health_checks (MutableSequence[str]): + The URL of the HttpHealthCheck resource. A + member instance in this pool is considered + healthy if and only if the health checks pass. + Only legacy HttpHealthChecks are supported. Only + one health check may be specified. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + instances (MutableSequence[str]): + A list of resource URLs to the virtual + machine instances serving this pool. They must + live in zones contained in the same region as + this pool. + kind (str): + [Output Only] Type of the resource. Always + compute#targetPool for target pools. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + region (str): + [Output Only] URL of the region where the target pool + resides. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + session_affinity (str): + Session affinity option, must be one of the following + values: NONE: Connections from the same client IP may go to + any instance in the pool. CLIENT_IP: Connections from the + same client IP will go to the same instance in the pool + while that instance remains healthy. CLIENT_IP_PROTO: + Connections from the same client IP with the same IP + protocol will go to the same instance in the pool while that + instance remains healthy. Check the SessionAffinity enum for + the list of possible values. + + This field is a member of `oneof`_ ``_session_affinity``. + """ + class SessionAffinity(proto.Enum): + r"""Session affinity option, must be one of the following values: NONE: + Connections from the same client IP may go to any instance in the + pool. CLIENT_IP: Connections from the same client IP will go to the + same instance in the pool while that instance remains healthy. + CLIENT_IP_PROTO: Connections from the same client IP with the same + IP protocol will go to the same instance in the pool while that + instance remains healthy. + + Values: + UNDEFINED_SESSION_AFFINITY (0): + A value indicating that the enum field is not + set. + CLIENT_IP (345665051): + 2-tuple hash on packet's source and + destination IP addresses. 
Connections from the + same source IP address to the same destination + IP address will be served by the same backend VM + while that VM remains healthy. + CLIENT_IP_NO_DESTINATION (106122516): + 1-tuple hash only on packet's source IP + address. Connections from the same source IP + address will be served by the same backend VM + while that VM remains healthy. This option can + only be used for Internal TCP/UDP Load + Balancing. + CLIENT_IP_PORT_PROTO (221722926): + 5-tuple hash on packet's source and + destination IP addresses, IP protocol, and + source and destination ports. Connections for + the same IP protocol from the same source IP + address and port to the same destination IP + address and port will be served by the same + backend VM while that VM remains healthy. This + option cannot be used for HTTP(S) load + balancing. + CLIENT_IP_PROTO (25322148): + 3-tuple hash on packet's source and + destination IP addresses, and IP protocol. + Connections for the same IP protocol from the + same source IP address to the same destination + IP address will be served by the same backend VM + while that VM remains healthy. This option + cannot be used for HTTP(S) load balancing. + GENERATED_COOKIE (370321204): + Hash based on a cookie generated by the L7 + loadbalancer. Only valid for HTTP(S) load + balancing. + HEADER_FIELD (200737960): + The hash is based on a user specified header + field. + HTTP_COOKIE (494981627): + The hash is based on a user provided cookie. + NONE (2402104): + No session affinity. Connections from the + same client IP may go to any instance in the + pool. 
+ """ + UNDEFINED_SESSION_AFFINITY = 0 + CLIENT_IP = 345665051 + CLIENT_IP_NO_DESTINATION = 106122516 + CLIENT_IP_PORT_PROTO = 221722926 + CLIENT_IP_PROTO = 25322148 + GENERATED_COOKIE = 370321204 + HEADER_FIELD = 200737960 + HTTP_COOKIE = 494981627 + NONE = 2402104 + + backup_pool: str = proto.Field( + proto.STRING, + number=45884537, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + failover_ratio: float = proto.Field( + proto.FLOAT, + number=212667006, + optional=True, + ) + health_checks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=448370606, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=29097598, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + session_affinity: str = proto.Field( + proto.STRING, + number=463888561, + optional=True, + ) + + +class TargetPoolAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.TargetPoolsScopedList]): + A list of TargetPool resources. + kind (str): + [Output Only] Type of resource. Always + compute#targetPoolAggregatedList for aggregated lists of + target pools. + + This field is a member of `oneof`_ ``_kind``. 
+ next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'TargetPoolsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='TargetPoolsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetPoolInstanceHealth(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_status (MutableSequence[google.cloud.compute_v1.types.HealthStatus]): + + kind (str): + [Output Only] Type of resource. Always + compute#targetPoolInstanceHealth when checking the health of + an instance. 
+ + This field is a member of `oneof`_ ``_kind``. + """ + + health_status: MutableSequence['HealthStatus'] = proto.RepeatedField( + proto.MESSAGE, + number=380545845, + message='HealthStatus', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + + +class TargetPoolList(proto.Message): + r"""Contains a list of TargetPool resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.TargetPool]): + A list of TargetPool resources. + kind (str): + [Output Only] Type of resource. Always + compute#targetPoolList for lists of target pools. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['TargetPool'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='TargetPool', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetPoolsAddHealthCheckRequest(proto.Message): + r""" + + Attributes: + health_checks (MutableSequence[google.cloud.compute_v1.types.HealthCheckReference]): + The HttpHealthCheck to add to the target + pool. + """ + + health_checks: MutableSequence['HealthCheckReference'] = proto.RepeatedField( + proto.MESSAGE, + number=448370606, + message='HealthCheckReference', + ) + + +class TargetPoolsAddInstanceRequest(proto.Message): + r""" + + Attributes: + instances (MutableSequence[google.cloud.compute_v1.types.InstanceReference]): + A full or partial URL to an instance to add + to this target pool. This can be a full or + partial URL. For example, the following are + valid URLs: - + https://www.googleapis.com/compute/v1/projects/project-id/zones/zone + /instances/instance-name - + projects/project-id/zones/zone/instances/instance-name + - zones/zone/instances/instance-name + """ + + instances: MutableSequence['InstanceReference'] = proto.RepeatedField( + proto.MESSAGE, + number=29097598, + message='InstanceReference', + ) + + +class TargetPoolsRemoveHealthCheckRequest(proto.Message): + r""" + + Attributes: + health_checks (MutableSequence[google.cloud.compute_v1.types.HealthCheckReference]): + Health check URL to be removed. This can be a + full or valid partial URL. 
For example, the + following are valid URLs: - + https://www.googleapis.com/compute/beta/projects/project + /global/httpHealthChecks/health-check - + projects/project/global/httpHealthChecks/health-check + - global/httpHealthChecks/health-check + """ + + health_checks: MutableSequence['HealthCheckReference'] = proto.RepeatedField( + proto.MESSAGE, + number=448370606, + message='HealthCheckReference', + ) + + +class TargetPoolsRemoveInstanceRequest(proto.Message): + r""" + + Attributes: + instances (MutableSequence[google.cloud.compute_v1.types.InstanceReference]): + URLs of the instances to be removed from + target pool. + """ + + instances: MutableSequence['InstanceReference'] = proto.RepeatedField( + proto.MESSAGE, + number=29097598, + message='InstanceReference', + ) + + +class TargetPoolsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_pools (MutableSequence[google.cloud.compute_v1.types.TargetPool]): + A list of target pools contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + target_pools: MutableSequence['TargetPool'] = proto.RepeatedField( + proto.MESSAGE, + number=336072617, + message='TargetPool', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetReference(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target (str): + + This field is a member of `oneof`_ ``_target``. + """ + + target: str = proto.Field( + proto.STRING, + number=192835985, + optional=True, + ) + + +class TargetSslProxiesSetBackendServiceRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + service (str): + The URL of the new BackendService resource + for the targetSslProxy. + + This field is a member of `oneof`_ ``_service``. + """ + + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + + +class TargetSslProxiesSetCertificateMapRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + certificate_map (str): + URL of the Certificate Map to associate with + this TargetSslProxy. Accepted format is + //certificatemanager.googleapis.com/projects/{project + }/locations/{location}/certificateMaps/{resourceName}. + + This field is a member of `oneof`_ ``_certificate_map``. + """ + + certificate_map: str = proto.Field( + proto.STRING, + number=156463796, + optional=True, + ) + + +class TargetSslProxiesSetProxyHeaderRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + proxy_header (str): + The new type of proxy header to append before sending data + to the backend. NONE or PROXY_V1 are allowed. Check the + ProxyHeader enum for the list of possible values. + + This field is a member of `oneof`_ ``_proxy_header``. + """ + class ProxyHeader(proto.Enum): + r"""The new type of proxy header to append before sending data to the + backend. NONE or PROXY_V1 are allowed. + + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. 
+ """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + + +class TargetSslProxiesSetSslCertificatesRequest(proto.Message): + r""" + + Attributes: + ssl_certificates (MutableSequence[str]): + New set of URLs to SslCertificate resources + to associate with this TargetSslProxy. At least + one SSL certificate must be specified. + Currently, you may specify up to 15 SSL + certificates. + """ + + ssl_certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=366006543, + ) + + +class TargetSslProxy(proto.Message): + r"""Represents a Target SSL Proxy resource. A target SSL proxy is + a component of a SSL Proxy load balancer. Global forwarding + rules reference a target SSL proxy, and the target proxy then + references an external backend service. For more information, + read Using Target Proxies. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + certificate_map (str): + URL of a certificate map that identifies a + certificate map associated with the given target + proxy. This field can only be set for global + target proxies. If set, sslCertificates will be + ignored. Accepted format is + //certificatemanager.googleapis.com/projects/{project + }/locations/{location}/certificateMaps/{resourceName}. + + This field is a member of `oneof`_ ``_certificate_map``. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. 
+ kind (str): + [Output Only] Type of the resource. Always + compute#targetSslProxy for target SSL proxies. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + proxy_header (str): + Specifies the type of proxy header to append before sending + data to the backend, either NONE or PROXY_V1. The default is + NONE. Check the ProxyHeader enum for the list of possible + values. + + This field is a member of `oneof`_ ``_proxy_header``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + service (str): + URL to the BackendService resource. + + This field is a member of `oneof`_ ``_service``. + ssl_certificates (MutableSequence[str]): + URLs to SslCertificate resources that are used to + authenticate connections to Backends. At least one SSL + certificate must be specified. Currently, you may specify up + to 15 SSL certificates. sslCertificates do not apply when + the load balancing scheme is set to INTERNAL_SELF_MANAGED. + ssl_policy (str): + URL of SslPolicy resource that will be + associated with the TargetSslProxy resource. If + not set, the TargetSslProxy resource will not + have any SSL policy configured. + + This field is a member of `oneof`_ ``_ssl_policy``. + """ + class ProxyHeader(proto.Enum): + r"""Specifies the type of proxy header to append before sending data to + the backend, either NONE or PROXY_V1. The default is NONE. 
+ + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. + """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + certificate_map: str = proto.Field( + proto.STRING, + number=156463796, + optional=True, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + ssl_certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=366006543, + ) + ssl_policy: str = proto.Field( + proto.STRING, + number=295190213, + optional=True, + ) + + +class TargetSslProxyList(proto.Message): + r"""Contains a list of TargetSslProxy resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.TargetSslProxy]): + A list of TargetSslProxy resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['TargetSslProxy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='TargetSslProxy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetTcpProxiesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_tcp_proxies (MutableSequence[google.cloud.compute_v1.types.TargetTcpProxy]): + A list of TargetTcpProxies contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + target_tcp_proxies: MutableSequence['TargetTcpProxy'] = proto.RepeatedField( + proto.MESSAGE, + number=262056832, + message='TargetTcpProxy', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetTcpProxiesSetBackendServiceRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + service (str): + The URL of the new BackendService resource + for the targetTcpProxy. + + This field is a member of `oneof`_ ``_service``. + """ + + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + + +class TargetTcpProxiesSetProxyHeaderRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + proxy_header (str): + The new type of proxy header to append before sending data + to the backend. NONE or PROXY_V1 are allowed. Check the + ProxyHeader enum for the list of possible values. + + This field is a member of `oneof`_ ``_proxy_header``. + """ + class ProxyHeader(proto.Enum): + r"""The new type of proxy header to append before sending data to the + backend. NONE or PROXY_V1 are allowed. + + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. + """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + + +class TargetTcpProxy(proto.Message): + r"""Represents a Target TCP Proxy resource. A target TCP proxy is + a component of a TCP Proxy load balancer. Global forwarding + rules reference target TCP proxy, and the target proxy then + references an external backend service. 
For more information, + read TCP Proxy Load Balancing overview. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#targetTcpProxy for target TCP proxies. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + proxy_bind (bool): + This field only applies when the forwarding rule that + references this target proxy has a loadBalancingScheme set + to INTERNAL_SELF_MANAGED. When this field is set to true, + Envoy proxies set up inbound traffic interception and bind + to the IP address and port specified in the forwarding rule. + This is generally useful when using Traffic Director to + configure Envoy as a gateway or middle proxy (in other + words, not a sidecar proxy). The Envoy proxy listens for + inbound requests and handles requests when it receives them. + The default is false. + + This field is a member of `oneof`_ ``_proxy_bind``. 
+ proxy_header (str): + Specifies the type of proxy header to append before sending + data to the backend, either NONE or PROXY_V1. The default is + NONE. Check the ProxyHeader enum for the list of possible + values. + + This field is a member of `oneof`_ ``_proxy_header``. + region (str): + [Output Only] URL of the region where the regional TCP proxy + resides. This field is not applicable to global TCP proxy. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + service (str): + URL to the BackendService resource. + + This field is a member of `oneof`_ ``_service``. + """ + class ProxyHeader(proto.Enum): + r"""Specifies the type of proxy header to append before sending data to + the backend, either NONE or PROXY_V1. The default is NONE. + + Values: + UNDEFINED_PROXY_HEADER (0): + A value indicating that the enum field is not + set. + NONE (2402104): + No description available. + PROXY_V1 (334352940): + No description available. 
+ """ + UNDEFINED_PROXY_HEADER = 0 + NONE = 2402104 + PROXY_V1 = 334352940 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + proxy_bind: bool = proto.Field( + proto.BOOL, + number=286025582, + optional=True, + ) + proxy_header: str = proto.Field( + proto.STRING, + number=160374142, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + + +class TargetTcpProxyAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.TargetTcpProxiesScopedList]): + A list of TargetTcpProxiesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#targetTcpProxyAggregatedList for lists of Target TCP + Proxies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'TargetTcpProxiesScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='TargetTcpProxiesScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetTcpProxyList(proto.Message): + r"""Contains a list of TargetTcpProxy resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.TargetTcpProxy]): + A list of TargetTcpProxy resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. 
If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['TargetTcpProxy'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='TargetTcpProxy', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetVpnGateway(proto.Message): + r"""Represents a Target VPN Gateway resource. The target VPN + gateway resource represents a Classic Cloud VPN gateway. For + more information, read the Cloud VPN Overview. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. 
+ forwarding_rules (MutableSequence[str]): + [Output Only] A list of URLs to the ForwardingRule + resources. ForwardingRules are created using + compute.forwardingRules.insert and associated with a VPN + gateway. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of resource. Always + compute#targetVpnGateway for target VPN gateways. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this TargetVpnGateway, which is essentially a + hash of the labels set used for optimistic + locking. The fingerprint is initially generated + by Compute Engine and changes after every + request to modify or update labels. You must + always provide an up-to-date fingerprint hash in + order to update or change labels, otherwise the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve a + TargetVpnGateway. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network (str): + URL of the network to which this VPN gateway + is attached. 
Provided by the client when the VPN + gateway is created. + + This field is a member of `oneof`_ ``_network``. + region (str): + [Output Only] URL of the region where the target VPN gateway + resides. You must specify this field as part of the HTTP + request URL. It is not settable as a field in the request + body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + status (str): + [Output Only] The status of the VPN gateway, which can be + one of the following: CREATING, READY, FAILED, or DELETING. + Check the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + tunnels (MutableSequence[str]): + [Output Only] A list of URLs to VpnTunnel resources. + VpnTunnels are created using the compute.vpntunnels.insert + method and associated with a VPN gateway. + """ + class Status(proto.Enum): + r"""[Output Only] The status of the VPN gateway, which can be one of the + following: CREATING, READY, FAILED, or DELETING. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + No description available. + DELETING (528602024): + No description available. + FAILED (455706685): + No description available. + READY (77848963): + No description available. 
+ """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + FAILED = 455706685 + READY = 77848963 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + forwarding_rules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=315821365, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + tunnels: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=104561931, + ) + + +class TargetVpnGatewayAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.TargetVpnGatewaysScopedList]): + A list of TargetVpnGateway resources. + kind (str): + [Output Only] Type of resource. Always + compute#targetVpnGateway for target VPN gateways. + + This field is a member of `oneof`_ ``_kind``. 
+ next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'TargetVpnGatewaysScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='TargetVpnGatewaysScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetVpnGatewayList(proto.Message): + r"""Contains a list of TargetVpnGateway resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. 
+ items (MutableSequence[google.cloud.compute_v1.types.TargetVpnGateway]): + A list of TargetVpnGateway resources. + kind (str): + [Output Only] Type of resource. Always + compute#targetVpnGateway for target VPN gateways. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['TargetVpnGateway'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='TargetVpnGateway', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TargetVpnGatewaysScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_vpn_gateways (MutableSequence[google.cloud.compute_v1.types.TargetVpnGateway]): + [Output Only] A list of target VPN gateways contained in + this scope. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + target_vpn_gateways: MutableSequence['TargetVpnGateway'] = proto.RepeatedField( + proto.MESSAGE, + number=401770888, + message='TargetVpnGateway', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class TestFailure(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + actual_output_url (str): + The actual output URL evaluated by a load + balancer containing the scheme, host, path and + query parameters. + + This field is a member of `oneof`_ ``_actual_output_url``. + actual_redirect_response_code (int): + Actual HTTP status code for rule with ``urlRedirect`` + calculated by load balancer + + This field is a member of `oneof`_ ``_actual_redirect_response_code``. + actual_service (str): + BackendService or BackendBucket returned by + load balancer. + + This field is a member of `oneof`_ ``_actual_service``. + expected_output_url (str): + The expected output URL evaluated by a load + balancer containing the scheme, host, path and + query parameters. + + This field is a member of `oneof`_ ``_expected_output_url``. + expected_redirect_response_code (int): + Expected HTTP status code for rule with ``urlRedirect`` + calculated by load balancer + + This field is a member of `oneof`_ ``_expected_redirect_response_code``. + expected_service (str): + Expected BackendService or BackendBucket + resource the given URL should be mapped to. + + This field is a member of `oneof`_ ``_expected_service``. + headers (MutableSequence[google.cloud.compute_v1.types.UrlMapTestHeader]): + HTTP headers of the request. + host (str): + Host portion of the URL. + + This field is a member of `oneof`_ ``_host``. 
+ path (str): + Path portion including query parameters in + the URL. + + This field is a member of `oneof`_ ``_path``. + """ + + actual_output_url: str = proto.Field( + proto.STRING, + number=287075458, + optional=True, + ) + actual_redirect_response_code: int = proto.Field( + proto.INT32, + number=42926553, + optional=True, + ) + actual_service: str = proto.Field( + proto.STRING, + number=440379652, + optional=True, + ) + expected_output_url: str = proto.Field( + proto.STRING, + number=433967384, + optional=True, + ) + expected_redirect_response_code: int = proto.Field( + proto.INT32, + number=18888047, + optional=True, + ) + expected_service: str = proto.Field( + proto.STRING, + number=133987374, + optional=True, + ) + headers: MutableSequence['UrlMapTestHeader'] = proto.RepeatedField( + proto.MESSAGE, + number=258436998, + message='UrlMapTestHeader', + ) + host: str = proto.Field( + proto.STRING, + number=3208616, + optional=True, + ) + path: str = proto.Field( + proto.STRING, + number=3433509, + optional=True, + ) + + +class TestIamPermissionsDiskRequest(proto.Message): + r"""A request message for Disks.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + zone (str): + The name of the zone for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class TestIamPermissionsExternalVpnGatewayRequest(proto.Message): + r"""A request message for ExternalVpnGateways.TestIamPermissions. + See the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.TestIamPermissions. + See the method description for details. + + Attributes: + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsImageRequest(proto.Message): + r"""A request message for Images.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. 
+ resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsInstanceRequest(proto.Message): + r"""A request message for Instances.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + zone (str): + The name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class TestIamPermissionsInstanceTemplateRequest(proto.Message): + r"""A request message for InstanceTemplates.TestIamPermissions. + See the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsLicenseCodeRequest(proto.Message): + r"""A request message for LicenseCodes.TestIamPermissions. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsLicenseRequest(proto.Message): + r"""A request message for Licenses.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsMachineImageRequest(proto.Message): + r"""A request message for MachineImages.TestIamPermissions. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.TestIamPermissions. + See the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsNetworkEndpointGroupRequest(proto.Message): + r"""A request message for + NetworkEndpointGroups.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + zone (str): + The name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class TestIamPermissionsNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + NetworkFirewallPolicies.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + zone (str): + The name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class TestIamPermissionsNodeTemplateRequest(proto.Message): + r"""A request message for NodeTemplates.TestIamPermissions. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsPacketMirroringRequest(proto.Message): + r"""A request message for PacketMirrorings.TestIamPermissions. + See the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsReservationRequest(proto.Message): + r"""A request message for Reservations.TestIamPermissions. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + zone (str): + The name of the zone for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class TestIamPermissionsResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.TestIamPermissions. + See the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.TestIamPermissions. + See the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsSnapshotRequest(proto.Message): + r"""A request message for Snapshots.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestIamPermissionsVpnGatewayRequest(proto.Message): + r"""A request message for VpnGateways.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: 'TestPermissionsRequest' = proto.Field( + proto.MESSAGE, + number=439214758, + message='TestPermissionsRequest', + ) + + +class TestPermissionsRequest(proto.Message): + r""" + + Attributes: + permissions (MutableSequence[str]): + The set of permissions to check for the 'resource'. + Permissions with wildcards (such as '*' or 'storage.*') are + not allowed. + """ + + permissions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=59962500, + ) + + +class TestPermissionsResponse(proto.Message): + r""" + + Attributes: + permissions (MutableSequence[str]): + A subset of ``TestPermissionsRequest.permissions`` that the + caller is allowed. 
+ """ + + permissions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=59962500, + ) + + +class Uint128(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + high (int): + + This field is a member of `oneof`_ ``_high``. + low (int): + + This field is a member of `oneof`_ ``_low``. + """ + + high: int = proto.Field( + proto.UINT64, + number=3202466, + optional=True, + ) + low: int = proto.Field( + proto.UINT64, + number=107348, + optional=True, + ) + + +class UpdateAccessConfigInstanceRequest(proto.Message): + r"""A request message for Instances.UpdateAccessConfig. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + access_config_resource (google.cloud.compute_v1.types.AccessConfig): + The body resource for this request + instance (str): + The instance name for this request. + network_interface (str): + The name of the network interface where the + access config is attached. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ zone (str): + The name of the zone for this request. + """ + + access_config_resource: 'AccessConfig' = proto.Field( + proto.MESSAGE, + number=387825552, + message='AccessConfig', + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + network_interface: str = proto.Field( + proto.STRING, + number=365387880, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateAutoscalerRequest(proto.Message): + r"""A request message for Autoscalers.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler (str): + Name of the autoscaler to update. + + This field is a member of `oneof`_ ``_autoscaler``. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + Name of the zone for this request. 
+ """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + optional=True, + ) + autoscaler_resource: 'Autoscaler' = proto.Field( + proto.MESSAGE, + number=207616118, + message='Autoscaler', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket (str): + Name of the BackendBucket resource to update. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + backend_bucket_resource: 'BackendBucket' = proto.Field( + proto.MESSAGE, + number=380757784, + message='BackendBucket', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to + update. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + backend_service_resource: 'BackendService' = proto.Field( + proto.MESSAGE, + number=347586723, + message='BackendService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateDiskRequest(proto.Message): + r"""A request message for Disks.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The disk name for this request. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + paths (str): + + This field is a member of `oneof`_ ``_paths``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + update_mask (str): + update_mask indicates fields to be updated as part of this + request. + + This field is a member of `oneof`_ ``_update_mask``. + zone (str): + The name of the zone for this request. 
+ """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + disk_resource: 'Disk' = proto.Field( + proto.MESSAGE, + number=25880688, + message='Disk', + ) + paths: str = proto.Field( + proto.STRING, + number=106438894, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateDisplayDeviceInstanceRequest(proto.Message): + r"""A request message for Instances.UpdateDisplayDevice. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_device_resource (google.cloud.compute_v1.types.DisplayDevice): + The body resource for this request + instance (str): + Name of the instance scoping this request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + display_device_resource: 'DisplayDevice' = proto.Field( + proto.MESSAGE, + number=289686106, + message='DisplayDevice', + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateFirewallRequest(proto.Message): + r"""A request message for Firewalls.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall (str): + Name of the firewall rule to update. + firewall_resource (google.cloud.compute_v1.types.Firewall): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + firewall: str = proto.Field( + proto.STRING, + number=511016192, + ) + firewall_resource: 'Firewall' = proto.Field( + proto.MESSAGE, + number=41425005, + message='Firewall', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateHealthCheckRequest(proto.Message): + r"""A request message for HealthChecks.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + Name of the HealthCheck resource to update. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + health_check_resource: 'HealthCheck' = proto.Field( + proto.MESSAGE, + number=201925032, + message='HealthCheck', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""A request message for Instances.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance resource to update. + instance_resource (google.cloud.compute_v1.types.Instance): + The body resource for this request + minimal_action (str): + Specifies the action to take when updating an + instance even if the updated properties do not + require it. If not specified, then Compute + Engine acts based on the minimum action that the + updated properties require. Check the + MinimalAction enum for the list of possible + values. + + This field is a member of `oneof`_ ``_minimal_action``. + most_disruptive_allowed_action (str): + Specifies the most disruptive action that can be taken on + the instance as part of the update. Compute Engine returns + an error if the instance properties require a more + disruptive action as part of the instance update. Valid + options from lowest to highest are NO_EFFECT, REFRESH, and + RESTART. Check the MostDisruptiveAllowedAction enum for the + list of possible values. + + This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + class MinimalAction(proto.Enum): + r"""Specifies the action to take when updating an instance even if the + updated properties do not require it. If not specified, then Compute + Engine acts based on the minimum action that the updated properties + require. Additional supported values which may be not listed in the + enum directly due to technical reasons: INVALID NO_EFFECT REFRESH + RESTART + + Values: + UNDEFINED_MINIMAL_ACTION (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_MINIMAL_ACTION = 0 + + class MostDisruptiveAllowedAction(proto.Enum): + r"""Specifies the most disruptive action that can be taken on the + instance as part of the update. Compute Engine returns an error if + the instance properties require a more disruptive action as part of + the instance update. Valid options from lowest to highest are + NO_EFFECT, REFRESH, and RESTART. Additional supported values which + may be not listed in the enum directly due to technical reasons: + INVALID NO_EFFECT REFRESH RESTART + + Values: + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION (0): + A value indicating that the enum field is not + set. 
+ """ + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION = 0 + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instance_resource: 'Instance' = proto.Field( + proto.MESSAGE, + number=215988344, + message='Instance', + ) + minimal_action: str = proto.Field( + proto.STRING, + number=270567060, + optional=True, + ) + most_disruptive_allowed_action: str = proto.Field( + proto.STRING, + number=66103053, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateNetworkInterfaceInstanceRequest(proto.Message): + r"""A request message for Instances.UpdateNetworkInterface. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + The instance name for this request. + network_interface (str): + The name of the network interface to update. + network_interface_resource (google.cloud.compute_v1.types.NetworkInterface): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + network_interface: str = proto.Field( + proto.STRING, + number=365387880, + ) + network_interface_resource: 'NetworkInterface' = proto.Field( + proto.MESSAGE, + number=325814789, + message='NetworkInterface', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdatePeeringNetworkRequest(proto.Message): + r"""A request message for Networks.UpdatePeering. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Name of the network resource which the + updated peering is belonging to. + networks_update_peering_request_resource (google.cloud.compute_v1.types.NetworksUpdatePeeringRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + network: str = proto.Field( + proto.STRING, + number=232872494, + ) + networks_update_peering_request_resource: 'NetworksUpdatePeeringRequest' = proto.Field( + proto.MESSAGE, + number=224433497, + message='NetworksUpdatePeeringRequest', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdatePerInstanceConfigsInstanceGroupManagerRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.UpdatePerInstanceConfigs. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + instance_group_managers_update_per_instance_configs_req_resource (google.cloud.compute_v1.types.InstanceGroupManagersUpdatePerInstanceConfigsReq): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_update_per_instance_configs_req_resource: 'InstanceGroupManagersUpdatePerInstanceConfigsReq' = proto.Field( + proto.MESSAGE, + number=141402302, + message='InstanceGroupManagersUpdatePerInstanceConfigsReq', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.UpdatePerInstanceConfigs. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request, + should conform to RFC1035. + region_instance_group_manager_update_instance_config_req_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagerUpdateInstanceConfigReq): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_manager_update_instance_config_req_resource: 'RegionInstanceGroupManagerUpdateInstanceConfigReq' = proto.Field( + proto.MESSAGE, + number=89036583, + message='RegionInstanceGroupManagerUpdateInstanceConfigReq', + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateRegionAutoscalerRequest(proto.Message): + r"""A request message for RegionAutoscalers.Update. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + autoscaler (str): + Name of the autoscaler to update. + + This field is a member of `oneof`_ ``_autoscaler``. + autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + autoscaler: str = proto.Field( + proto.STRING, + number=517258967, + optional=True, + ) + autoscaler_resource: 'Autoscaler' = proto.Field( + proto.MESSAGE, + number=207616118, + message='Autoscaler', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.Update. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + Name of the BackendService resource to + update. + backend_service_resource (google.cloud.compute_v1.types.BackendService): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + backend_service_resource: 'BackendService' = proto.Field( + proto.MESSAGE, + number=347586723, + message='BackendService', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateRegionCommitmentRequest(proto.Message): + r"""A request message for RegionCommitments.Update. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + commitment (str): + Name of the commitment for which auto renew + is being updated. + commitment_resource (google.cloud.compute_v1.types.Commitment): + The body resource for this request + paths (str): + + This field is a member of `oneof`_ ``_paths``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + update_mask (str): + update_mask indicates fields to be updated as part of this + request. 
+ + This field is a member of `oneof`_ ``_update_mask``. + """ + + commitment: str = proto.Field( + proto.STRING, + number=482134805, + ) + commitment_resource: 'Commitment' = proto.Field( + proto.MESSAGE, + number=244240888, + message='Commitment', + ) + paths: str = proto.Field( + proto.STRING, + number=106438894, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + + +class UpdateRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + The disk name for this request. + disk_resource (google.cloud.compute_v1.types.Disk): + The body resource for this request + paths (str): + + This field is a member of `oneof`_ ``_paths``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. + update_mask (str): + update_mask indicates fields to be updated as part of this + request. + + This field is a member of `oneof`_ ``_update_mask``. + """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + disk_resource: 'Disk' = proto.Field( + proto.MESSAGE, + number=25880688, + message='Disk', + ) + paths: str = proto.Field( + proto.STRING, + number=106438894, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + + +class UpdateRegionHealthCheckRequest(proto.Message): + r"""A request message for RegionHealthChecks.Update. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + Name of the HealthCheck resource to update. + health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + health_check_resource: 'HealthCheck' = proto.Field( + proto.MESSAGE, + number=201925032, + message='HealthCheck', + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateRegionUrlMapRequest(proto.Message): + r"""A request message for RegionUrlMaps.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + begin_interface: MixerMutationRequestBuilder Request ID to + support idempotency. + + This field is a member of `oneof`_ ``_request_id``. + url_map (str): + Name of the UrlMap resource to update. + url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + url_map_resource: 'UrlMap' = proto.Field( + proto.MESSAGE, + number=168675425, + message='UrlMap', + ) + + +class UpdateReservationRequest(proto.Message): + r"""A request message for Reservations.Update. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + paths (str): + + This field is a member of `oneof`_ ``_paths``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + reservation (str): + Name of the reservation to update. + reservation_resource (google.cloud.compute_v1.types.Reservation): + The body resource for this request + update_mask (str): + Update_mask indicates fields to be updated as part of this + request. + + This field is a member of `oneof`_ ``_update_mask``. + zone (str): + Name of the zone for this request. 
+ """ + + paths: str = proto.Field( + proto.STRING, + number=106438894, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + reservation: str = proto.Field( + proto.STRING, + number=47530956, + ) + reservation_resource: 'Reservation' = proto.Field( + proto.MESSAGE, + number=285030177, + message='Reservation', + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateRouterRequest(proto.Message): + r"""A request message for Routers.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + router (str): + Name of the Router resource to update. 
+ router_resource (google.cloud.compute_v1.types.Router): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) + router_resource: 'Router' = proto.Field( + proto.MESSAGE, + number=155222084, + message='Router', + ) + + +class UpdateShieldedInstanceConfigInstanceRequest(proto.Message): + r"""A request message for Instances.UpdateShieldedInstanceConfig. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name or id of the instance scoping this + request. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + shielded_instance_config_resource (google.cloud.compute_v1.types.ShieldedInstanceConfig): + The body resource for this request + zone (str): + The name of the zone for this request. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + shielded_instance_config_resource: 'ShieldedInstanceConfig' = proto.Field( + proto.MESSAGE, + number=272059224, + message='ShieldedInstanceConfig', + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateUrlMapRequest(proto.Message): + r"""A request message for UrlMaps.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + url_map (str): + Name of the UrlMap resource to update. 
+ url_map_resource (google.cloud.compute_v1.types.UrlMap): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + url_map_resource: 'UrlMap' = proto.Field( + proto.MESSAGE, + number=168675425, + message='UrlMap', + ) + + +class UrlMap(proto.Message): + r"""Represents a URL Map resource. Compute Engine has two URL Map + resources: \* `Global `__ + \* `Regional `__ A + URL map resource is a component of certain types of cloud load + balancers and Traffic Director: \* urlMaps are used by external + HTTP(S) load balancers and Traffic Director. \* regionUrlMaps are + used by internal HTTP(S) load balancers. For a list of supported URL + map features by the load balancer type, see the Load balancing + features: Routing and traffic management table. For a list of + supported URL map features for Traffic Director, see the Traffic + Director features: Routing and traffic management table. This + resource defines mappings from hostnames and URL paths to either a + backend service or a backend bucket. To use the global urlMaps + resource, the backend service must have a loadBalancingScheme of + either EXTERNAL or INTERNAL_SELF_MANAGED. To use the regionUrlMaps + resource, the backend service must have a loadBalancingScheme of + INTERNAL_MANAGED. For more information, read URL Map Concepts. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + default_route_action (google.cloud.compute_v1.types.HttpRouteAction): + defaultRouteAction takes effect when none of + the hostRules match. 
The load balancer performs + advanced routing actions, such as URL rewrites + and header transformations, before forwarding + the request to the selected backend. If + defaultRouteAction specifies any + weightedBackendServices, defaultService must not + be set. Conversely if defaultService is set, + defaultRouteAction cannot contain any + weightedBackendServices. Only one of + defaultRouteAction or defaultUrlRedirect must be + set. URL maps for Classic external HTTP(S) load + balancers only support the urlRewrite action + within defaultRouteAction. defaultRouteAction + has no effect when the URL map is bound to a + target gRPC proxy that has the + validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_default_route_action``. + default_service (str): + The full or partial URL of the defaultService + resource to which traffic is directed if none of + the hostRules match. If defaultRouteAction is + also specified, advanced routing actions, such + as URL rewrites, take effect before sending the + request to the backend. However, if + defaultService is specified, defaultRouteAction + cannot contain any weightedBackendServices. + Conversely, if routeAction specifies any + weightedBackendServices, service must not be + specified. Only one of defaultService, + defaultUrlRedirect , or + defaultRouteAction.weightedBackendService must + be set. defaultService has no effect when the + URL map is bound to a target gRPC proxy that has + the validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_default_service``. + default_url_redirect (google.cloud.compute_v1.types.HttpRedirectAction): + When none of the specified hostRules match, + the request is redirected to a URL specified by + defaultUrlRedirect. If defaultUrlRedirect is + specified, defaultService or defaultRouteAction + must not be set. Not supported when the URL map + is bound to a target gRPC proxy. + + This field is a member of `oneof`_ ``_default_url_redirect``. 
+ description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field is + ignored when inserting a UrlMap. An up-to-date + fingerprint must be provided in order to update + the UrlMap, otherwise the request will fail with + error 412 conditionNotMet. To see the latest + fingerprint, make a get() request to retrieve a + UrlMap. + + This field is a member of `oneof`_ ``_fingerprint``. + header_action (google.cloud.compute_v1.types.HttpHeaderAction): + Specifies changes to request and response + headers that need to take effect for the + selected backendService. The headerAction + specified here take effect after headerAction + specified under pathMatcher. headerAction is not + supported for load balancers that have their + loadBalancingScheme set to EXTERNAL. Not + supported when the URL map is bound to a target + gRPC proxy that has validateForProxyless field + set to true. + + This field is a member of `oneof`_ ``_header_action``. + host_rules (MutableSequence[google.cloud.compute_v1.types.HostRule]): + The list of host rules to use against the + URL. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always compute#urlMaps + for url maps. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + path_matchers (MutableSequence[google.cloud.compute_v1.types.PathMatcher]): + The list of named PathMatchers to use against + the URL. + region (str): + [Output Only] URL of the region where the regional URL map + resides. This field is not applicable to global URL maps. + You must specify this field as part of the HTTP request URL. + It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + tests (MutableSequence[google.cloud.compute_v1.types.UrlMapTest]): + The list of expected URL mapping tests. + Request to update the UrlMap succeeds only if + all test cases pass. You can specify a maximum + of 100 tests per UrlMap. Not supported when the + URL map is bound to a target gRPC proxy that has + validateForProxyless field set to true. 
+ """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + default_route_action: 'HttpRouteAction' = proto.Field( + proto.MESSAGE, + number=378919466, + optional=True, + message='HttpRouteAction', + ) + default_service: str = proto.Field( + proto.STRING, + number=370242231, + optional=True, + ) + default_url_redirect: 'HttpRedirectAction' = proto.Field( + proto.MESSAGE, + number=359503338, + optional=True, + message='HttpRedirectAction', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + header_action: 'HttpHeaderAction' = proto.Field( + proto.MESSAGE, + number=328077352, + optional=True, + message='HttpHeaderAction', + ) + host_rules: MutableSequence['HostRule'] = proto.RepeatedField( + proto.MESSAGE, + number=311804832, + message='HostRule', + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + path_matchers: MutableSequence['PathMatcher'] = proto.RepeatedField( + proto.MESSAGE, + number=271664219, + message='PathMatcher', + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + tests: MutableSequence['UrlMapTest'] = proto.RepeatedField( + proto.MESSAGE, + number=110251553, + message='UrlMapTest', + ) + + +class UrlMapList(proto.Message): + r"""Contains a list of UrlMap resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. 
+ items (MutableSequence[google.cloud.compute_v1.types.UrlMap]): + A list of UrlMap resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['UrlMap'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='UrlMap', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class UrlMapReference(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + url_map (str): + + This field is a member of `oneof`_ ``_url_map``. + """ + + url_map: str = proto.Field( + proto.STRING, + number=367020684, + optional=True, + ) + + +class UrlMapTest(proto.Message): + r"""Message for the expected URL mappings. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + Description of this test case. + + This field is a member of `oneof`_ ``_description``. + expected_output_url (str): + The expected output URL evaluated by the load balancer + containing the scheme, host, path and query parameters. For + rules that forward requests to backends, the test passes + only when expectedOutputUrl matches the request forwarded by + the load balancer to backends. For rules with urlRewrite, + the test verifies that the forwarded request matches + hostRewrite and pathPrefixRewrite in the urlRewrite action. + When service is specified, expectedOutputUrl`s scheme is + ignored. For rules with urlRedirect, the test passes only if + expectedOutputUrl matches the URL in the load balancer's + redirect response. If urlRedirect specifies https_redirect, + the test passes only if the scheme in expectedOutputUrl is + also set to HTTPS. If urlRedirect specifies strip_query, the + test passes only if expectedOutputUrl does not contain any + query parameters. expectedOutputUrl is optional when service + is specified. + + This field is a member of `oneof`_ ``_expected_output_url``. + expected_redirect_response_code (int): + For rules with urlRedirect, the test passes + only if expectedRedirectResponseCode matches the + HTTP status code in load balancer's redirect + response. expectedRedirectResponseCode cannot be + set when service is set. + + This field is a member of `oneof`_ ``_expected_redirect_response_code``. + headers (MutableSequence[google.cloud.compute_v1.types.UrlMapTestHeader]): + HTTP headers for this request. If headers + contains a host header, then host must also + match the header value. + host (str): + Host portion of the URL. If headers contains + a host header, then host must also match the + header value. + + This field is a member of `oneof`_ ``_host``. + path (str): + Path portion of the URL. 
+ + This field is a member of `oneof`_ ``_path``. + service (str): + Expected BackendService or BackendBucket + resource the given URL should be mapped to. The + service field cannot be set if + expectedRedirectResponseCode is set. + + This field is a member of `oneof`_ ``_service``. + """ + + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + expected_output_url: str = proto.Field( + proto.STRING, + number=433967384, + optional=True, + ) + expected_redirect_response_code: int = proto.Field( + proto.INT32, + number=18888047, + optional=True, + ) + headers: MutableSequence['UrlMapTestHeader'] = proto.RepeatedField( + proto.MESSAGE, + number=258436998, + message='UrlMapTestHeader', + ) + host: str = proto.Field( + proto.STRING, + number=3208616, + optional=True, + ) + path: str = proto.Field( + proto.STRING, + number=3433509, + optional=True, + ) + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) + + +class UrlMapTestHeader(proto.Message): + r"""HTTP headers used in UrlMapTests. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Header name. + + This field is a member of `oneof`_ ``_name``. + value (str): + Header value. + + This field is a member of `oneof`_ ``_value``. + """ + + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=111972721, + optional=True, + ) + + +class UrlMapValidationResult(proto.Message): + r"""Message representing the validation result for a UrlMap. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + load_errors (MutableSequence[str]): + + load_succeeded (bool): + Whether the given UrlMap can be successfully + loaded. If false, 'loadErrors' indicates the + reasons. 
+ + This field is a member of `oneof`_ ``_load_succeeded``. + test_failures (MutableSequence[google.cloud.compute_v1.types.TestFailure]): + + test_passed (bool): + If successfully loaded, this field indicates + whether the test passed. If false, + 'testFailures's indicate the reason of failure. + + This field is a member of `oneof`_ ``_test_passed``. + """ + + load_errors: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=310147300, + ) + load_succeeded: bool = proto.Field( + proto.BOOL, + number=128326216, + optional=True, + ) + test_failures: MutableSequence['TestFailure'] = proto.RepeatedField( + proto.MESSAGE, + number=505934134, + message='TestFailure', + ) + test_passed: bool = proto.Field( + proto.BOOL, + number=192708797, + optional=True, + ) + + +class UrlMapsAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.UrlMapsScopedList]): + A list of UrlMapsScopedList resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'UrlMapsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='UrlMapsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class UrlMapsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + url_maps (MutableSequence[google.cloud.compute_v1.types.UrlMap]): + A list of UrlMaps contained in this scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + url_maps: MutableSequence['UrlMap'] = proto.RepeatedField( + proto.MESSAGE, + number=103352167, + message='UrlMap', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class UrlMapsValidateRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + load_balancing_schemes (MutableSequence[str]): + Specifies the load balancer type(s) this validation request + is for. 
Use EXTERNAL_MANAGED for HTTP/HTTPS External Global + Load Balancer with Advanced Traffic Management. Use EXTERNAL + for Classic HTTP/HTTPS External Global Load Balancer. Other + load balancer types are not supported. For more information, + refer to Choosing a load balancer. If unspecified, the load + balancing scheme will be inferred from the backend service + resources this URL map references. If that can not be + inferred (for example, this URL map only references backend + buckets, or this Url map is for rewrites and redirects only + and doesn't reference any backends), EXTERNAL will be used + as the default type. If specified, the scheme(s) must not + conflict with the load balancing scheme of the backend + service resources this Url map references. Check the + LoadBalancingSchemes enum for the list of possible values. + resource (google.cloud.compute_v1.types.UrlMap): + Content of the UrlMap to be validated. + + This field is a member of `oneof`_ ``_resource``. + """ + class LoadBalancingSchemes(proto.Enum): + r""" + + Values: + UNDEFINED_LOAD_BALANCING_SCHEMES (0): + A value indicating that the enum field is not + set. + EXTERNAL (35607499): + Signifies that this will be used for Classic + L7 External Load Balancing. + EXTERNAL_MANAGED (512006923): + Signifies that this will be used for + Envoy-based L7 External Load Balancing. + LOAD_BALANCING_SCHEME_UNSPECIFIED (526507452): + If unspecified, the validation will try to + infer the scheme from the backend service + resources this Url map references. If the + inference is not possible, EXTERNAL will be + used as the default type. 
+ """ + UNDEFINED_LOAD_BALANCING_SCHEMES = 0 + EXTERNAL = 35607499 + EXTERNAL_MANAGED = 512006923 + LOAD_BALANCING_SCHEME_UNSPECIFIED = 526507452 + + load_balancing_schemes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6308527, + ) + resource: 'UrlMap' = proto.Field( + proto.MESSAGE, + number=195806222, + optional=True, + message='UrlMap', + ) + + +class UrlMapsValidateResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + result (google.cloud.compute_v1.types.UrlMapValidationResult): + + This field is a member of `oneof`_ ``_result``. + """ + + result: 'UrlMapValidationResult' = proto.Field( + proto.MESSAGE, + number=139315229, + optional=True, + message='UrlMapValidationResult', + ) + + +class UrlRewrite(proto.Message): + r"""The spec for modifying the path before sending the request to + the matched backend service. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host_rewrite (str): + Before forwarding the request to the selected + service, the request's host header is replaced + with contents of hostRewrite. The value must be + from 1 to 255 characters. + + This field is a member of `oneof`_ ``_host_rewrite``. + path_prefix_rewrite (str): + Before forwarding the request to the selected + backend service, the matching portion of the + request's path is replaced by pathPrefixRewrite. + The value must be from 1 to 1024 characters. + + This field is a member of `oneof`_ ``_path_prefix_rewrite``. + path_template_rewrite (str): + If specified, the pattern rewrites the URL path (based on + the :path header) using the HTTP template syntax. A + corresponding path_template_match must be specified. Any + template variables must exist in the path_template_match + field. 
- -At least one variable must be specified in the + path_template_match field - You can omit variables from the + rewritten URL - The \* and \*\* operators cannot be matched + unless they have a corresponding variable name - e.g. + {format=*} or {var=**}. For example, a path_template_match + of /static/{format=**} could be rewritten as + /static/content/{format} to prefix /content to the URL. + Variables can also be re-ordered in a rewrite, so that + /{country}/{format}/{suffix=**} can be rewritten as + /content/{format}/{country}/{suffix}. At least one non-empty + routeRules[].matchRules[].path_template_match is required. + Only one of path_prefix_rewrite or path_template_rewrite may + be specified. + + This field is a member of `oneof`_ ``_path_template_rewrite``. + """ + + host_rewrite: str = proto.Field( + proto.STRING, + number=159819253, + optional=True, + ) + path_prefix_rewrite: str = proto.Field( + proto.STRING, + number=41186361, + optional=True, + ) + path_template_rewrite: str = proto.Field( + proto.STRING, + number=423409569, + optional=True, + ) + + +class UsableSubnetwork(proto.Message): + r"""Subnetwork which the current user has compute.subnetworks.use + permission on. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + external_ipv6_prefix (str): + [Output Only] The external IPv6 address range that is + assigned to this subnetwork. + + This field is a member of `oneof`_ ``_external_ipv6_prefix``. + internal_ipv6_prefix (str): + [Output Only] The internal IPv6 address range that is + assigned to this subnetwork. + + This field is a member of `oneof`_ ``_internal_ipv6_prefix``. + ip_cidr_range (str): + The range of internal addresses that are + owned by this subnetwork. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + ipv6_access_type (str): + The access type of IPv6 address this subnet holds. 
It's + immutable and can only be specified during creation or the + first time the subnet is updated into IPV4_IPV6 dual stack. + Check the Ipv6AccessType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_ipv6_access_type``. + network (str): + Network URL. + + This field is a member of `oneof`_ ``_network``. + purpose (str): + The purpose of the resource. This field can be either + PRIVATE, REGIONAL_MANAGED_PROXY, PRIVATE_SERVICE_CONNECT, or + INTERNAL_HTTPS_LOAD_BALANCER. PRIVATE is the default purpose + for user-created subnets or subnets that are automatically + created in auto mode networks. A subnet with purpose set to + REGIONAL_MANAGED_PROXY is a user-created subnetwork that is + reserved for regional Envoy-based load balancers. A subnet + with purpose set to PRIVATE_SERVICE_CONNECT is used to + publish services using Private Service Connect. A subnet + with purpose set to INTERNAL_HTTPS_LOAD_BALANCER is a + proxy-only subnet that can be used only by regional internal + HTTP(S) load balancers. Note that REGIONAL_MANAGED_PROXY is + the preferred setting for all regional Envoy load balancers. + If unspecified, the subnet purpose defaults to PRIVATE. The + enableFlowLogs field isn't supported if the subnet purpose + field is set to REGIONAL_MANAGED_PROXY. Check the Purpose + enum for the list of possible values. + + This field is a member of `oneof`_ ``_purpose``. + role (str): + The role of subnetwork. Currently, this field is only used + when purpose = REGIONAL_MANAGED_PROXY. The value can be set + to ACTIVE or BACKUP. An ACTIVE subnetwork is one that is + currently being used for Envoy-based load balancers in a + region. A BACKUP subnetwork is one that is ready to be + promoted to ACTIVE or is currently draining. This field can + be updated with a patch request. Check the Role enum for the + list of possible values. + + This field is a member of `oneof`_ ``_role``. 
+ secondary_ip_ranges (MutableSequence[google.cloud.compute_v1.types.UsableSubnetworkSecondaryRange]): + Secondary IP ranges. + stack_type (str): + The stack type for the subnet. If set to IPV4_ONLY, new VMs + in the subnet are assigned IPv4 addresses only. If set to + IPV4_IPV6, new VMs in the subnet can be assigned both IPv4 + and IPv6 addresses. If not specified, IPV4_ONLY is used. + This field can be both set at resource creation time and + updated using patch. Check the StackType enum for the list + of possible values. + + This field is a member of `oneof`_ ``_stack_type``. + subnetwork (str): + Subnetwork URL. + + This field is a member of `oneof`_ ``_subnetwork``. + """ + class Ipv6AccessType(proto.Enum): + r"""The access type of IPv6 address this subnet holds. It's immutable + and can only be specified during creation or the first time the + subnet is updated into IPV4_IPV6 dual stack. + + Values: + UNDEFINED_IPV6_ACCESS_TYPE (0): + A value indicating that the enum field is not + set. + EXTERNAL (35607499): + VMs on this subnet will be assigned IPv6 + addresses that are accessible via the Internet, + as well as the VPC network. + INTERNAL (279295677): + VMs on this subnet will be assigned IPv6 + addresses that are only accessible over the VPC + network. + """ + UNDEFINED_IPV6_ACCESS_TYPE = 0 + EXTERNAL = 35607499 + INTERNAL = 279295677 + + class Purpose(proto.Enum): + r"""The purpose of the resource. This field can be either PRIVATE, + REGIONAL_MANAGED_PROXY, PRIVATE_SERVICE_CONNECT, or + INTERNAL_HTTPS_LOAD_BALANCER. PRIVATE is the default purpose for + user-created subnets or subnets that are automatically created in + auto mode networks. A subnet with purpose set to + REGIONAL_MANAGED_PROXY is a user-created subnetwork that is reserved + for regional Envoy-based load balancers. A subnet with purpose set + to PRIVATE_SERVICE_CONNECT is used to publish services using Private + Service Connect. 
A subnet with purpose set to + INTERNAL_HTTPS_LOAD_BALANCER is a proxy-only subnet that can be used + only by regional internal HTTP(S) load balancers. Note that + REGIONAL_MANAGED_PROXY is the preferred setting for all regional + Envoy load balancers. If unspecified, the subnet purpose defaults to + PRIVATE. The enableFlowLogs field isn't supported if the subnet + purpose field is set to REGIONAL_MANAGED_PROXY. + + Values: + UNDEFINED_PURPOSE (0): + A value indicating that the enum field is not + set. + INTERNAL_HTTPS_LOAD_BALANCER (248748889): + Subnet reserved for Internal HTTP(S) Load + Balancing. + PRIVATE (403485027): + Regular user created or automatically created + subnet. + PRIVATE_RFC_1918 (254902107): + Regular user created or automatically created + subnet. + PRIVATE_SERVICE_CONNECT (48134724): + Subnetworks created for Private Service + Connect in the producer network. + REGIONAL_MANAGED_PROXY (153049966): + Subnetwork used for Regional + Internal/External HTTP(S) Load Balancing. + """ + UNDEFINED_PURPOSE = 0 + INTERNAL_HTTPS_LOAD_BALANCER = 248748889 + PRIVATE = 403485027 + PRIVATE_RFC_1918 = 254902107 + PRIVATE_SERVICE_CONNECT = 48134724 + REGIONAL_MANAGED_PROXY = 153049966 + + class Role(proto.Enum): + r"""The role of subnetwork. Currently, this field is only used when + purpose = REGIONAL_MANAGED_PROXY. The value can be set to ACTIVE or + BACKUP. An ACTIVE subnetwork is one that is currently being used for + Envoy-based load balancers in a region. A BACKUP subnetwork is one + that is ready to be promoted to ACTIVE or is currently draining. + This field can be updated with a patch request. + + Values: + UNDEFINED_ROLE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + The ACTIVE subnet that is currently used. + BACKUP (341010882): + The BACKUP subnet that could be promoted to + ACTIVE. 
+ """ + UNDEFINED_ROLE = 0 + ACTIVE = 314733318 + BACKUP = 341010882 + + class StackType(proto.Enum): + r"""The stack type for the subnet. If set to IPV4_ONLY, new VMs in the + subnet are assigned IPv4 addresses only. If set to IPV4_IPV6, new + VMs in the subnet can be assigned both IPv4 and IPv6 addresses. If + not specified, IPV4_ONLY is used. This field can be both set at + resource creation time and updated using patch. + + Values: + UNDEFINED_STACK_TYPE (0): + A value indicating that the enum field is not + set. + IPV4_IPV6 (22197249): + New VMs in this subnet can have both IPv4 and + IPv6 addresses. + IPV4_ONLY (22373798): + New VMs in this subnet will only be assigned + IPv4 addresses. + """ + UNDEFINED_STACK_TYPE = 0 + IPV4_IPV6 = 22197249 + IPV4_ONLY = 22373798 + + external_ipv6_prefix: str = proto.Field( + proto.STRING, + number=139299190, + optional=True, + ) + internal_ipv6_prefix: str = proto.Field( + proto.STRING, + number=506270056, + optional=True, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + ipv6_access_type: str = proto.Field( + proto.STRING, + number=504658653, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + purpose: str = proto.Field( + proto.STRING, + number=316407070, + optional=True, + ) + role: str = proto.Field( + proto.STRING, + number=3506294, + optional=True, + ) + secondary_ip_ranges: MutableSequence['UsableSubnetworkSecondaryRange'] = proto.RepeatedField( + proto.MESSAGE, + number=136658915, + message='UsableSubnetworkSecondaryRange', + ) + stack_type: str = proto.Field( + proto.STRING, + number=425908881, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=307827694, + optional=True, + ) + + +class UsableSubnetworkSecondaryRange(proto.Message): + r"""Secondary IP range of a usable subnetwork. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_cidr_range (str): + The range of IP addresses belonging to this + subnetwork secondary range. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + range_name (str): + The name associated with this subnetwork + secondary range, used when adding an alias IP + range to a VM instance. The name must be 1-63 + characters long, and comply with RFC1035. The + name must be unique within the subnetwork. + + This field is a member of `oneof`_ ``_range_name``. + """ + + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + range_name: str = proto.Field( + proto.STRING, + number=332216397, + optional=True, + ) + + +class UsableSubnetworksAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.UsableSubnetwork]): + [Output] A list of usable subnetwork URLs. + kind (str): + [Output Only] Type of resource. Always + compute#usableSubnetworksAggregatedList for aggregated lists + of usable subnetworks. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. In special cases + listUsable may return 0 subnetworks and nextPageToken which + still should be used to get the next page of results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['UsableSubnetwork'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='UsableSubnetwork', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class UsageExportLocation(proto.Message): + r"""The location in Cloud Storage and naming method of the daily usage + report. Contains bucket_name and report_name prefix. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bucket_name (str): + The name of an existing bucket in Cloud + Storage where the usage report object is stored. + The Google Service Account is granted write + access to this bucket. This can either be the + bucket name by itself, such as example-bucket, + or the bucket name with gs:// or + https://storage.googleapis.com/ in front of it, + such as gs://example-bucket. + + This field is a member of `oneof`_ ``_bucket_name``. + report_name_prefix (str): + An optional prefix for the name of the usage report object + stored in bucketName. If not supplied, defaults to + usage_gce. 
The report is stored as a CSV file named + report_name_prefix_gce_YYYYMMDD.csv where YYYYMMDD is the + day of the usage according to Pacific Time. If you supply a + prefix, it should conform to Cloud Storage object naming + conventions. + + This field is a member of `oneof`_ ``_report_name_prefix``. + """ + + bucket_name: str = proto.Field( + proto.STRING, + number=283610048, + optional=True, + ) + report_name_prefix: str = proto.Field( + proto.STRING, + number=320198715, + optional=True, + ) + + +class ValidateRegionUrlMapRequest(proto.Message): + r"""A request message for RegionUrlMaps.Validate. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_url_maps_validate_request_resource (google.cloud.compute_v1.types.RegionUrlMapsValidateRequest): + The body resource for this request + url_map (str): + Name of the UrlMap resource to be validated + as. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_url_maps_validate_request_resource: 'RegionUrlMapsValidateRequest' = proto.Field( + proto.MESSAGE, + number=56632858, + message='RegionUrlMapsValidateRequest', + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + + +class ValidateUrlMapRequest(proto.Message): + r"""A request message for UrlMaps.Validate. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + url_map (str): + Name of the UrlMap resource to be validated + as. 
+ url_maps_validate_request_resource (google.cloud.compute_v1.types.UrlMapsValidateRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + url_map: str = proto.Field( + proto.STRING, + number=367020684, + ) + url_maps_validate_request_resource: 'UrlMapsValidateRequest' = proto.Field( + proto.MESSAGE, + number=395913455, + message='UrlMapsValidateRequest', + ) + + +class VmEndpointNatMappings(proto.Message): + r"""Contain information of Nat mapping for a VM endpoint (i.e., + NIC). + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_name (str): + Name of the VM instance which the endpoint + belongs to + + This field is a member of `oneof`_ ``_instance_name``. + interface_nat_mappings (MutableSequence[google.cloud.compute_v1.types.VmEndpointNatMappingsInterfaceNatMappings]): + + """ + + instance_name: str = proto.Field( + proto.STRING, + number=227947509, + optional=True, + ) + interface_nat_mappings: MutableSequence['VmEndpointNatMappingsInterfaceNatMappings'] = proto.RepeatedField( + proto.MESSAGE, + number=256196617, + message='VmEndpointNatMappingsInterfaceNatMappings', + ) + + +class VmEndpointNatMappingsInterfaceNatMappings(proto.Message): + r"""Contain information of Nat mapping for an interface of this + endpoint. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + drain_nat_ip_port_ranges (MutableSequence[str]): + List of all drain IP:port-range mappings assigned to this + interface. These ranges are inclusive, that is, both the + first and the last ports can be used for NAT. Example: + ["2.2.2.2:12345-12355", "1.1.1.1:2234-2234"]. + nat_ip_port_ranges (MutableSequence[str]): + A list of all IP:port-range mappings assigned to this + interface. 
These ranges are inclusive, that is, both the + first and the last ports can be used for NAT. Example: + ["2.2.2.2:12345-12355", "1.1.1.1:2234-2234"]. + num_total_drain_nat_ports (int): + Total number of drain ports across all NAT IPs allocated to + this interface. It equals to the aggregated port number in + the field drain_nat_ip_port_ranges. + + This field is a member of `oneof`_ ``_num_total_drain_nat_ports``. + num_total_nat_ports (int): + Total number of ports across all NAT IPs allocated to this + interface. It equals to the aggregated port number in the + field nat_ip_port_ranges. + + This field is a member of `oneof`_ ``_num_total_nat_ports``. + rule_mappings (MutableSequence[google.cloud.compute_v1.types.VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings]): + Information about mappings provided by rules + in this NAT. + source_alias_ip_range (str): + Alias IP range for this interface endpoint. + It will be a private (RFC 1918) IP range. + Examples: "10.33.4.55/32", or "192.168.5.0/24". + + This field is a member of `oneof`_ ``_source_alias_ip_range``. + source_virtual_ip (str): + Primary IP of the VM for this NIC. + + This field is a member of `oneof`_ ``_source_virtual_ip``. 
+ """ + + drain_nat_ip_port_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=395440577, + ) + nat_ip_port_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=531830810, + ) + num_total_drain_nat_ports: int = proto.Field( + proto.INT32, + number=335532793, + optional=True, + ) + num_total_nat_ports: int = proto.Field( + proto.INT32, + number=299904384, + optional=True, + ) + rule_mappings: MutableSequence['VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings'] = proto.RepeatedField( + proto.MESSAGE, + number=486192968, + message='VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings', + ) + source_alias_ip_range: str = proto.Field( + proto.STRING, + number=440340952, + optional=True, + ) + source_virtual_ip: str = proto.Field( + proto.STRING, + number=149836159, + optional=True, + ) + + +class VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings(proto.Message): + r"""Contains information of NAT Mappings provided by a NAT Rule. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + drain_nat_ip_port_ranges (MutableSequence[str]): + List of all drain IP:port-range mappings assigned to this + interface by this rule. These ranges are inclusive, that is, + both the first and the last ports can be used for NAT. + Example: ["2.2.2.2:12345-12355", "1.1.1.1:2234-2234"]. + nat_ip_port_ranges (MutableSequence[str]): + A list of all IP:port-range mappings assigned to this + interface by this rule. These ranges are inclusive, that is, + both the first and the last ports can be used for NAT. + Example: ["2.2.2.2:12345-12355", "1.1.1.1:2234-2234"]. + num_total_drain_nat_ports (int): + Total number of drain ports across all NAT IPs allocated to + this interface by this rule. It equals the aggregated port + number in the field drain_nat_ip_port_ranges. + + This field is a member of `oneof`_ ``_num_total_drain_nat_ports``. 
+ num_total_nat_ports (int): + Total number of ports across all NAT IPs allocated to this + interface by this rule. It equals the aggregated port number + in the field nat_ip_port_ranges. + + This field is a member of `oneof`_ ``_num_total_nat_ports``. + rule_number (int): + Rule number of the NAT Rule. + + This field is a member of `oneof`_ ``_rule_number``. + """ + + drain_nat_ip_port_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=395440577, + ) + nat_ip_port_ranges: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=531830810, + ) + num_total_drain_nat_ports: int = proto.Field( + proto.INT32, + number=335532793, + optional=True, + ) + num_total_nat_ports: int = proto.Field( + proto.INT32, + number=299904384, + optional=True, + ) + rule_number: int = proto.Field( + proto.INT32, + number=535211500, + optional=True, + ) + + +class VmEndpointNatMappingsList(proto.Message): + r"""Contains a list of VmEndpointNatMappings. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of resource. Always + compute#vmEndpointNatMappingsList for lists of Nat mappings + of VM endpoints. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
+ result (MutableSequence[google.cloud.compute_v1.types.VmEndpointNatMappings]): + [Output Only] A list of Nat mapping information of VM + endpoints. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + result: MutableSequence['VmEndpointNatMappings'] = proto.RepeatedField( + proto.MESSAGE, + number=139315229, + message='VmEndpointNatMappings', + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class VpnGateway(proto.Message): + r"""Represents a HA VPN gateway. HA VPN is a high-availability + (HA) Cloud VPN solution that lets you securely connect your + on-premises network to your Google Cloud Virtual Private Cloud + network through an IPsec VPN connection in a single region. For + more information about Cloud HA VPN solutions, see Cloud VPN + topologies . + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. 
This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of resource. Always compute#vpnGateway + for VPN gateways. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this VpnGateway, which is essentially a hash of + the labels set used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash in order to update + or change labels, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make a get() request to + retrieve a VpnGateway. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network (str): + URL of the network to which this VPN gateway + is attached. Provided by the client when the VPN + gateway is created. + + This field is a member of `oneof`_ ``_network``. + region (str): + [Output Only] URL of the region where the VPN gateway + resides. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + stack_type (str): + The stack type for this VPN gateway to identify the IP + protocols that are enabled. Possible values are: IPV4_ONLY, + IPV4_IPV6. If not specified, IPV4_ONLY will be used. Check + the StackType enum for the list of possible values. + + This field is a member of `oneof`_ ``_stack_type``. + vpn_interfaces (MutableSequence[google.cloud.compute_v1.types.VpnGatewayVpnGatewayInterface]): + The list of VPN interfaces associated with + this VPN gateway. + """ + class StackType(proto.Enum): + r"""The stack type for this VPN gateway to identify the IP protocols + that are enabled. Possible values are: IPV4_ONLY, IPV4_IPV6. If not + specified, IPV4_ONLY will be used. + + Values: + UNDEFINED_STACK_TYPE (0): + A value indicating that the enum field is not + set. + IPV4_IPV6 (22197249): + Enable VPN gateway with both IPv4 and IPv6 + protocols. + IPV4_ONLY (22373798): + Enable VPN gateway with only IPv4 protocol. + """ + UNDEFINED_STACK_TYPE = 0 + IPV4_IPV6 = 22197249 + IPV4_ONLY = 22373798 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + stack_type: str = proto.Field( 
+ proto.STRING, + number=425908881, + optional=True, + ) + vpn_interfaces: MutableSequence['VpnGatewayVpnGatewayInterface'] = proto.RepeatedField( + proto.MESSAGE, + number=91842181, + message='VpnGatewayVpnGatewayInterface', + ) + + +class VpnGatewayAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.VpnGatewaysScopedList]): + A list of VpnGateway resources. + kind (str): + [Output Only] Type of resource. Always compute#vpnGateway + for VPN gateways. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'VpnGatewaysScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='VpnGatewaysScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class VpnGatewayList(proto.Message): + r"""Contains a list of VpnGateway resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.VpnGateway]): + A list of VpnGateway resources. + kind (str): + [Output Only] Type of resource. Always compute#vpnGateway + for VPN gateways. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
+ warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['VpnGateway'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='VpnGateway', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class VpnGatewayStatus(proto.Message): + r""" + + Attributes: + vpn_connections (MutableSequence[google.cloud.compute_v1.types.VpnGatewayStatusVpnConnection]): + List of VPN connection for this VpnGateway. + """ + + vpn_connections: MutableSequence['VpnGatewayStatusVpnConnection'] = proto.RepeatedField( + proto.MESSAGE, + number=439334538, + message='VpnGatewayStatusVpnConnection', + ) + + +class VpnGatewayStatusHighAvailabilityRequirementState(proto.Message): + r"""Describes the high availability requirement state for the VPN + connection between this Cloud VPN gateway and a peer gateway. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + state (str): + Indicates the high availability requirement state for the + VPN connection. Valid values are CONNECTION_REDUNDANCY_MET, + CONNECTION_REDUNDANCY_NOT_MET. Check the State enum for the + list of possible values. + + This field is a member of `oneof`_ ``_state``. + unsatisfied_reason (str): + Indicates the reason why the VPN connection does not meet + the high availability redundancy criteria/requirement. Valid + values is INCOMPLETE_TUNNELS_COVERAGE. 
Check the + UnsatisfiedReason enum for the list of possible values. + + This field is a member of `oneof`_ ``_unsatisfied_reason``. + """ + class State(proto.Enum): + r"""Indicates the high availability requirement state for the VPN + connection. Valid values are CONNECTION_REDUNDANCY_MET, + CONNECTION_REDUNDANCY_NOT_MET. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + CONNECTION_REDUNDANCY_MET (505242907): + VPN tunnels are configured with adequate + redundancy from Cloud VPN gateway to the peer + VPN gateway. For both GCP-to-non-GCP and + GCP-to-GCP connections, the adequate redundancy + is a pre-requirement for users to get 99.99% + availability on GCP side; please note that for + any connection, end-to-end 99.99% availability + is subject to proper configuration on the peer + VPN gateway. + CONNECTION_REDUNDANCY_NOT_MET (511863311): + VPN tunnels are not configured with adequate + redundancy from the Cloud VPN gateway to the + peer gateway + """ + UNDEFINED_STATE = 0 + CONNECTION_REDUNDANCY_MET = 505242907 + CONNECTION_REDUNDANCY_NOT_MET = 511863311 + + class UnsatisfiedReason(proto.Enum): + r"""Indicates the reason why the VPN connection does not meet the high + availability redundancy criteria/requirement. Valid values is + INCOMPLETE_TUNNELS_COVERAGE. + + Values: + UNDEFINED_UNSATISFIED_REASON (0): + A value indicating that the enum field is not + set. + INCOMPLETE_TUNNELS_COVERAGE (55917437): + No description available. + """ + UNDEFINED_UNSATISFIED_REASON = 0 + INCOMPLETE_TUNNELS_COVERAGE = 55917437 + + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + unsatisfied_reason: str = proto.Field( + proto.STRING, + number=55016330, + optional=True, + ) + + +class VpnGatewayStatusTunnel(proto.Message): + r"""Contains some information about a VPN tunnel. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + local_gateway_interface (int): + The VPN gateway interface this VPN tunnel is + associated with. + + This field is a member of `oneof`_ ``_local_gateway_interface``. + peer_gateway_interface (int): + The peer gateway interface this VPN tunnel is + connected to, the peer gateway could either be + an external VPN gateway or a Google Cloud VPN + gateway. + + This field is a member of `oneof`_ ``_peer_gateway_interface``. + tunnel_url (str): + URL reference to the VPN tunnel. + + This field is a member of `oneof`_ ``_tunnel_url``. + """ + + local_gateway_interface: int = proto.Field( + proto.UINT32, + number=158764330, + optional=True, + ) + peer_gateway_interface: int = proto.Field( + proto.UINT32, + number=214380385, + optional=True, + ) + tunnel_url: str = proto.Field( + proto.STRING, + number=78975256, + optional=True, + ) + + +class VpnGatewayStatusVpnConnection(proto.Message): + r"""A VPN connection contains all VPN tunnels connected from this + VpnGateway to the same peer gateway. The peer gateway could + either be an external VPN gateway or a Google Cloud VPN gateway. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + peer_external_gateway (str): + URL reference to the peer external VPN gateways to which the + VPN tunnels in this VPN connection are connected. This field + is mutually exclusive with peer_gcp_gateway. + + This field is a member of `oneof`_ ``_peer_external_gateway``. + peer_gcp_gateway (str): + URL reference to the peer side VPN gateways to which the VPN + tunnels in this VPN connection are connected. This field is + mutually exclusive with peer_gcp_gateway. + + This field is a member of `oneof`_ ``_peer_gcp_gateway``. 
+ state (google.cloud.compute_v1.types.VpnGatewayStatusHighAvailabilityRequirementState): + HighAvailabilityRequirementState for the VPN + connection. + + This field is a member of `oneof`_ ``_state``. + tunnels (MutableSequence[google.cloud.compute_v1.types.VpnGatewayStatusTunnel]): + List of VPN tunnels that are in this VPN + connection. + """ + + peer_external_gateway: str = proto.Field( + proto.STRING, + number=384956173, + optional=True, + ) + peer_gcp_gateway: str = proto.Field( + proto.STRING, + number=281867452, + optional=True, + ) + state: 'VpnGatewayStatusHighAvailabilityRequirementState' = proto.Field( + proto.MESSAGE, + number=109757585, + optional=True, + message='VpnGatewayStatusHighAvailabilityRequirementState', + ) + tunnels: MutableSequence['VpnGatewayStatusTunnel'] = proto.RepeatedField( + proto.MESSAGE, + number=104561931, + message='VpnGatewayStatusTunnel', + ) + + +class VpnGatewayVpnGatewayInterface(proto.Message): + r"""A VPN gateway interface. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (int): + [Output Only] Numeric identifier for this VPN interface + associated with the VPN gateway. + + This field is a member of `oneof`_ ``_id``. + interconnect_attachment (str): + URL of the VLAN attachment + (interconnectAttachment) resource for this VPN + gateway interface. When the value of this field + is present, the VPN gateway is used for HA VPN + over Cloud Interconnect; all egress or ingress + traffic for this VPN gateway interface goes + through the specified VLAN attachment resource. + + This field is a member of `oneof`_ ``_interconnect_attachment``. + ip_address (str): + [Output Only] IP address for this VPN interface associated + with the VPN gateway. The IP address could be either a + regional external IP address or a regional internal IP + address. The two IP addresses for a VPN gateway must be all + regional external or regional internal IP addresses. 
There + cannot be a mix of regional external IP addresses and + regional internal IP addresses. For HA VPN over Cloud + Interconnect, the IP addresses for both interfaces could + either be regional internal IP addresses or regional + external IP addresses. For regular (non HA VPN over Cloud + Interconnect) HA VPN tunnels, the IP address must be a + regional external IP address. + + This field is a member of `oneof`_ ``_ip_address``. + """ + + id: int = proto.Field( + proto.UINT32, + number=3355, + optional=True, + ) + interconnect_attachment: str = proto.Field( + proto.STRING, + number=308135284, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=406272220, + optional=True, + ) + + +class VpnGatewaysGetStatusResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + result (google.cloud.compute_v1.types.VpnGatewayStatus): + + This field is a member of `oneof`_ ``_result``. + """ + + result: 'VpnGatewayStatus' = proto.Field( + proto.MESSAGE, + number=139315229, + optional=True, + message='VpnGatewayStatus', + ) + + +class VpnGatewaysScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + vpn_gateways (MutableSequence[google.cloud.compute_v1.types.VpnGateway]): + [Output Only] A list of VPN gateways contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + vpn_gateways: MutableSequence['VpnGateway'] = proto.RepeatedField( + proto.MESSAGE, + number=259177882, + message='VpnGateway', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class VpnTunnel(proto.Message): + r"""Represents a Cloud VPN Tunnel resource. For more information + about VPN, read the the Cloud VPN Overview. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. + + This field is a member of `oneof`_ ``_description``. + detailed_status (str): + [Output Only] Detailed status message for the VPN tunnel. + + This field is a member of `oneof`_ ``_detailed_status``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + ike_version (int): + IKE protocol version to use when establishing + the VPN tunnel with the peer VPN gateway. + Acceptable IKE versions are 1 or 2. The default + version is 2. + + This field is a member of `oneof`_ ``_ike_version``. + kind (str): + [Output Only] Type of resource. Always compute#vpnTunnel for + VPN tunnels. + + This field is a member of `oneof`_ ``_kind``. + label_fingerprint (str): + A fingerprint for the labels being applied to + this VpnTunnel, which is essentially a hash of + the labels set used for optimistic locking. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. 
You must always provide an + up-to-date fingerprint hash in order to update + or change labels, otherwise the request will + fail with error 412 conditionNotMet. To see the + latest fingerprint, make a get() request to + retrieve a VpnTunnel. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels for this resource. These can only be + added or modified by the setLabels method. Each + label key/value pair must comply with RFC1035. + Label values may be empty. + local_traffic_selector (MutableSequence[str]): + Local traffic selector to use when + establishing the VPN tunnel with the peer VPN + gateway. The value should be a CIDR formatted + string, for example: 192.168.0.0/16. The ranges + must be disjoint. Only IPv4 is supported. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + peer_external_gateway (str): + URL of the peer side external VPN gateway to + which this VPN tunnel is connected. Provided by + the client when the VPN tunnel is created. This + field is exclusive with the field + peerGcpGateway. + + This field is a member of `oneof`_ ``_peer_external_gateway``. + peer_external_gateway_interface (int): + The interface ID of the external VPN gateway to which this + VPN tunnel is connected. Provided by the client when the VPN + tunnel is created. Possible values are: ``0``, ``1``, ``2``, + ``3``. The number of IDs in use depends on the external VPN + gateway redundancy type. 
+ + This field is a member of `oneof`_ ``_peer_external_gateway_interface``. + peer_gcp_gateway (str): + URL of the peer side HA GCP VPN gateway to + which this VPN tunnel is connected. Provided by + the client when the VPN tunnel is created. This + field can be used when creating highly available + VPN from VPC network to VPC network, the field + is exclusive with the field peerExternalGateway. + If provided, the VPN tunnel will automatically + use the same vpnGatewayInterface ID in the peer + GCP VPN gateway. + + This field is a member of `oneof`_ ``_peer_gcp_gateway``. + peer_ip (str): + IP address of the peer VPN gateway. Only IPv4 + is supported. + + This field is a member of `oneof`_ ``_peer_ip``. + region (str): + [Output Only] URL of the region where the VPN tunnel + resides. You must specify this field as part of the HTTP + request URL. It is not settable as a field in the request + body. + + This field is a member of `oneof`_ ``_region``. + remote_traffic_selector (MutableSequence[str]): + Remote traffic selectors to use when + establishing the VPN tunnel with the peer VPN + gateway. The value should be a CIDR formatted + string, for example: 192.168.0.0/16. The ranges + should be disjoint. Only IPv4 is supported. + router (str): + URL of the router resource to be used for + dynamic routing. + + This field is a member of `oneof`_ ``_router``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + shared_secret (str): + Shared secret used to set the secure session + between the Cloud VPN gateway and the peer VPN + gateway. + + This field is a member of `oneof`_ ``_shared_secret``. + shared_secret_hash (str): + Hash of the shared secret. + + This field is a member of `oneof`_ ``_shared_secret_hash``. + status (str): + [Output Only] The status of the VPN tunnel, which can be one + of the following: - PROVISIONING: Resource is being + allocated for the VPN tunnel. 
- WAITING_FOR_FULL_CONFIG: + Waiting to receive all VPN-related configs from the user. + Network, TargetVpnGateway, VpnTunnel, ForwardingRule, and + Route resources are needed to setup the VPN tunnel. - + FIRST_HANDSHAKE: Successful first handshake with the peer + VPN. - ESTABLISHED: Secure session is successfully + established with the peer VPN. - NETWORK_ERROR: Deprecated, + replaced by NO_INCOMING_PACKETS - AUTHORIZATION_ERROR: Auth + error (for example, bad shared secret). - + NEGOTIATION_FAILURE: Handshake failed. - DEPROVISIONING: + Resources are being deallocated for the VPN tunnel. - + FAILED: Tunnel creation has failed and the tunnel is not + ready to be used. - NO_INCOMING_PACKETS: No incoming packets + from peer. - REJECTED: Tunnel configuration was rejected, + can be result of being denied access. - + ALLOCATING_RESOURCES: Cloud VPN is in the process of + allocating all required resources. - STOPPED: Tunnel is + stopped due to its Forwarding Rules being deleted for + Classic VPN tunnels or the project is in frozen state. - + PEER_IDENTITY_MISMATCH: Peer identity does not match peer + IP, probably behind NAT. - TS_NARROWING_NOT_ALLOWED: Traffic + selector narrowing not allowed for an HA-VPN tunnel. Check + the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + target_vpn_gateway (str): + URL of the Target VPN gateway with which this + VPN tunnel is associated. Provided by the client + when the VPN tunnel is created. + + This field is a member of `oneof`_ ``_target_vpn_gateway``. + vpn_gateway (str): + URL of the VPN gateway with which this VPN tunnel is + associated. Provided by the client when the VPN tunnel is + created. This must be used (instead of target_vpn_gateway) + if a High Availability VPN gateway resource is created. + + This field is a member of `oneof`_ ``_vpn_gateway``. + vpn_gateway_interface (int): + The interface ID of the VPN gateway with which this VPN + tunnel is associated. 
Possible values are: ``0``, ``1``. + + This field is a member of `oneof`_ ``_vpn_gateway_interface``. + """ + class Status(proto.Enum): + r"""[Output Only] The status of the VPN tunnel, which can be one of the + following: - PROVISIONING: Resource is being allocated for the VPN + tunnel. - WAITING_FOR_FULL_CONFIG: Waiting to receive all + VPN-related configs from the user. Network, TargetVpnGateway, + VpnTunnel, ForwardingRule, and Route resources are needed to setup + the VPN tunnel. - FIRST_HANDSHAKE: Successful first handshake with + the peer VPN. - ESTABLISHED: Secure session is successfully + established with the peer VPN. - NETWORK_ERROR: Deprecated, replaced + by NO_INCOMING_PACKETS - AUTHORIZATION_ERROR: Auth error (for + example, bad shared secret). - NEGOTIATION_FAILURE: Handshake + failed. - DEPROVISIONING: Resources are being deallocated for the + VPN tunnel. - FAILED: Tunnel creation has failed and the tunnel is + not ready to be used. - NO_INCOMING_PACKETS: No incoming packets + from peer. - REJECTED: Tunnel configuration was rejected, can be + result of being denied access. - ALLOCATING_RESOURCES: Cloud VPN is + in the process of allocating all required resources. - STOPPED: + Tunnel is stopped due to its Forwarding Rules being deleted for + Classic VPN tunnels or the project is in frozen state. - + PEER_IDENTITY_MISMATCH: Peer identity does not match peer IP, + probably behind NAT. - TS_NARROWING_NOT_ALLOWED: Traffic selector + narrowing not allowed for an HA-VPN tunnel. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + ALLOCATING_RESOURCES (320922816): + Cloud VPN is in the process of allocating all + required resources (specifically, a borg task). + AUTHORIZATION_ERROR (23580290): + Auth error (e.g. bad shared secret). + DEPROVISIONING (428935662): + Resources is being deallocated for the VPN + tunnel. + ESTABLISHED (88852344): + Secure session is successfully established + with peer VPN. 
+ FAILED (455706685): + Tunnel creation has failed and the tunnel is + not ready to be used. + FIRST_HANDSHAKE (191393000): + Successful first handshake with peer VPN. + NEGOTIATION_FAILURE (360325868): + Handshake failed. + NETWORK_ERROR (193912951): + Deprecated, replaced by NO_INCOMING_PACKETS + NO_INCOMING_PACKETS (119983216): + No incoming packets from peer + PROVISIONING (290896621): + Resource is being allocated for the VPN + tunnel. + REJECTED (174130302): + Tunnel configuration was rejected, can be + result of being denylisted. + STOPPED (444276141): + Tunnel is stopped due to its Forwarding Rules + being deleted. + WAITING_FOR_FULL_CONFIG (41640522): + Waiting to receive all VPN-related configs + from user. Network, TargetVpnGateway, VpnTunnel, + ForwardingRule and Route resources are needed to + setup VPN tunnel. + """ + UNDEFINED_STATUS = 0 + ALLOCATING_RESOURCES = 320922816 + AUTHORIZATION_ERROR = 23580290 + DEPROVISIONING = 428935662 + ESTABLISHED = 88852344 + FAILED = 455706685 + FIRST_HANDSHAKE = 191393000 + NEGOTIATION_FAILURE = 360325868 + NETWORK_ERROR = 193912951 + NO_INCOMING_PACKETS = 119983216 + PROVISIONING = 290896621 + REJECTED = 174130302 + STOPPED = 444276141 + WAITING_FOR_FULL_CONFIG = 41640522 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + detailed_status: str = proto.Field( + proto.STRING, + number=333501025, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + ike_version: int = proto.Field( + proto.INT32, + number=218376220, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + 
local_traffic_selector: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=317314613, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + peer_external_gateway: str = proto.Field( + proto.STRING, + number=384956173, + optional=True, + ) + peer_external_gateway_interface: int = proto.Field( + proto.INT32, + number=452768391, + optional=True, + ) + peer_gcp_gateway: str = proto.Field( + proto.STRING, + number=281867452, + optional=True, + ) + peer_ip: str = proto.Field( + proto.STRING, + number=383249700, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + remote_traffic_selector: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=358887098, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + shared_secret: str = proto.Field( + proto.STRING, + number=381932490, + optional=True, + ) + shared_secret_hash: str = proto.Field( + proto.STRING, + number=398881891, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + target_vpn_gateway: str = proto.Field( + proto.STRING, + number=532512843, + optional=True, + ) + vpn_gateway: str = proto.Field( + proto.STRING, + number=406684153, + optional=True, + ) + vpn_gateway_interface: int = proto.Field( + proto.INT32, + number=95979123, + optional=True, + ) + + +class VpnTunnelAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.VpnTunnelsScopedList]): + A list of VpnTunnelsScopedList resources. 
+ kind (str): + [Output Only] Type of resource. Always compute#vpnTunnel for + VPN tunnels. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, 'VpnTunnelsScopedList'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message='VpnTunnelsScopedList', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class VpnTunnelList(proto.Message): + r"""Contains a list of VpnTunnel resources. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.VpnTunnel]): + A list of VpnTunnel resources. + kind (str): + [Output Only] Type of resource. Always compute#vpnTunnel for + VPN tunnels. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['VpnTunnel'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='VpnTunnel', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class VpnTunnelsScopedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + vpn_tunnels (MutableSequence[google.cloud.compute_v1.types.VpnTunnel]): + A list of VPN tunnels contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + vpn_tunnels: MutableSequence['VpnTunnel'] = proto.RepeatedField( + proto.MESSAGE, + number=163494080, + message='VpnTunnel', + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class WafExpressionSet(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + aliases (MutableSequence[str]): + A list of alternate IDs. The format should + be: - E.g. XSS-stable Generic suffix like + "stable" is particularly useful if a policy + likes to avail newer set of expressions without + having to change the policy. A given alias name + can't be used for more than one entity set. + expressions (MutableSequence[google.cloud.compute_v1.types.WafExpressionSetExpression]): + List of available expressions. + id (str): + Google specified expression set ID. The + format should be: - E.g. XSS-20170329 required + + This field is a member of `oneof`_ ``_id``. + """ + + aliases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=159207166, + ) + expressions: MutableSequence['WafExpressionSetExpression'] = proto.RepeatedField( + proto.MESSAGE, + number=175554779, + message='WafExpressionSetExpression', + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + + +class WafExpressionSetExpression(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + Expression ID should uniquely identify the + origin of the expression. E.g. + owasp-crs-v020901-id973337 identifies Owasp core + rule set version 2.9.1 rule id 973337. The ID + could be used to determine the individual attack + definition that has been detected. It could also + be used to exclude it from the policy in case of + false positive. required + + This field is a member of `oneof`_ ``_id``. + sensitivity (int): + The sensitivity value associated with the WAF + rule ID. This corresponds to the ModSecurity + paranoia level, ranging from 1 to 4. 0 is + reserved for opt-in only rules. + + This field is a member of `oneof`_ ``_sensitivity``. + """ + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + sensitivity: int = proto.Field( + proto.INT32, + number=27532959, + optional=True, + ) + + +class WaitGlobalOperationRequest(proto.Message): + r"""A request message for GlobalOperations.Wait. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to return. + project (str): + Project ID for this request. + """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + + +class WaitRegionOperationRequest(proto.Message): + r"""A request message for RegionOperations.Wait. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to return. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. 
+ """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class WaitZoneOperationRequest(proto.Message): + r"""A request message for ZoneOperations.Wait. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to return. + project (str): + Project ID for this request. + zone (str): + Name of the zone for this request. + """ + + operation: str = proto.Field( + proto.STRING, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class Warning(proto.Message): + r"""[Output Only] Informational warning message. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (str): + [Output Only] A warning code, if applicable. For example, + Compute Engine returns NO_RESULTS_ON_PAGE if there are no + results in the response. Check the Code enum for the list of + possible values. + + This field is a member of `oneof`_ ``_code``. + data (MutableSequence[google.cloud.compute_v1.types.Data]): + [Output Only] Metadata about this warning in key: value + format. For example: "data": [ { "key": "scope", "value": + "zones/us-east1-d" } + message (str): + [Output Only] A human-readable description of the warning + code. + + This field is a member of `oneof`_ ``_message``. + """ + class Code(proto.Enum): + r"""[Output Only] A warning code, if applicable. For example, Compute + Engine returns NO_RESULTS_ON_PAGE if there are no results in the + response. + + Values: + UNDEFINED_CODE (0): + A value indicating that the enum field is not + set. + CLEANUP_FAILED (150308440): + Warning about failed cleanup of transient + changes made by a failed operation. 
+ DEPRECATED_RESOURCE_USED (391835586): + A link to a deprecated resource was created. + DEPRECATED_TYPE_USED (346526230): + When deploying and at least one of the + resources has a type marked as deprecated + DISK_SIZE_LARGER_THAN_IMAGE_SIZE (369442967): + The user created a boot disk that is larger + than image size. + EXPERIMENTAL_TYPE_USED (451954443): + When deploying and at least one of the + resources has a type marked as experimental + EXTERNAL_API_WARNING (175546307): + Warning that is present in an external api + call + FIELD_VALUE_OVERRIDEN (329669423): + Warning that value of a field has been + overridden. Deprecated unused field. + INJECTED_KERNELS_DEPRECATED (417377419): + The operation involved use of an injected + kernel, which is deprecated. + INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB (401542606): + A WEIGHTED_MAGLEV backend service is associated with a + health check that is not of type HTTP/HTTPS/HTTP2. + LARGE_DEPLOYMENT_WARNING (481440678): + When deploying a deployment with a + exceedingly large number of resources + LIST_OVERHEAD_QUOTA_EXCEED (47618117): + Resource can't be retrieved due to list + overhead quota exceed which captures the amount + of resources filtered out by user-defined list + filter. + MISSING_TYPE_DEPENDENCY (344505463): + A resource depends on a missing type + NEXT_HOP_ADDRESS_NOT_ASSIGNED (324964999): + The route's nextHopIp address is not assigned + to an instance on the network. + NEXT_HOP_CANNOT_IP_FORWARD (383382887): + The route's next hop instance cannot ip + forward. + NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE (146748434): + The route's nextHopInstance URL refers to an + instance that does not have an ipv6 interface on + the same network as the route. + NEXT_HOP_INSTANCE_NOT_FOUND (464250446): + The route's nextHopInstance URL refers to an + instance that does not exist. 
+ NEXT_HOP_INSTANCE_NOT_ON_NETWORK (243758146): + The route's nextHopInstance URL refers to an + instance that is not on the same network as the + route. + NEXT_HOP_NOT_RUNNING (417081265): + The route's next hop instance does not have a + status of RUNNING. + NOT_CRITICAL_ERROR (105763924): + Error which is not critical. We decided to + continue the process despite the mentioned + error. + NO_RESULTS_ON_PAGE (30036744): + No results are present on a particular list + page. + PARTIAL_SUCCESS (39966469): + Success is reported, but some results may be + missing due to errors + REQUIRED_TOS_AGREEMENT (3745539): + The user attempted to use a resource that + requires a TOS they have not accepted. + RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING (496728641): + Warning that a resource is in use. + RESOURCE_NOT_DELETED (168598460): + One or more of the resources set to + auto-delete could not be deleted because they + were in use. + SCHEMA_VALIDATION_IGNORED (275245642): + When a resource schema validation is ignored. + SINGLE_INSTANCE_PROPERTY_TEMPLATE (268305617): + Instance template used in instance group + manager is valid as such, but its application + does not make a lot of sense, because it allows + only single instance in instance group. + UNDECLARED_PROPERTIES (390513439): + When undeclared properties in the schema are + present + UNREACHABLE (13328052): + A given scope cannot be reached. 
+ """ + UNDEFINED_CODE = 0 + CLEANUP_FAILED = 150308440 + DEPRECATED_RESOURCE_USED = 391835586 + DEPRECATED_TYPE_USED = 346526230 + DISK_SIZE_LARGER_THAN_IMAGE_SIZE = 369442967 + EXPERIMENTAL_TYPE_USED = 451954443 + EXTERNAL_API_WARNING = 175546307 + FIELD_VALUE_OVERRIDEN = 329669423 + INJECTED_KERNELS_DEPRECATED = 417377419 + INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB = 401542606 + LARGE_DEPLOYMENT_WARNING = 481440678 + LIST_OVERHEAD_QUOTA_EXCEED = 47618117 + MISSING_TYPE_DEPENDENCY = 344505463 + NEXT_HOP_ADDRESS_NOT_ASSIGNED = 324964999 + NEXT_HOP_CANNOT_IP_FORWARD = 383382887 + NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE = 146748434 + NEXT_HOP_INSTANCE_NOT_FOUND = 464250446 + NEXT_HOP_INSTANCE_NOT_ON_NETWORK = 243758146 + NEXT_HOP_NOT_RUNNING = 417081265 + NOT_CRITICAL_ERROR = 105763924 + NO_RESULTS_ON_PAGE = 30036744 + PARTIAL_SUCCESS = 39966469 + REQUIRED_TOS_AGREEMENT = 3745539 + RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING = 496728641 + RESOURCE_NOT_DELETED = 168598460 + SCHEMA_VALIDATION_IGNORED = 275245642 + SINGLE_INSTANCE_PROPERTY_TEMPLATE = 268305617 + UNDECLARED_PROPERTIES = 390513439 + UNREACHABLE = 13328052 + + code: str = proto.Field( + proto.STRING, + number=3059181, + optional=True, + ) + data: MutableSequence['Data'] = proto.RepeatedField( + proto.MESSAGE, + number=3076010, + message='Data', + ) + message: str = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + + +class Warnings(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (str): + [Output Only] A warning code, if applicable. For example, + Compute Engine returns NO_RESULTS_ON_PAGE if there are no + results in the response. Check the Code enum for the list of + possible values. + + This field is a member of `oneof`_ ``_code``. + data (MutableSequence[google.cloud.compute_v1.types.Data]): + [Output Only] Metadata about this warning in key: value + format. 
For example: "data": [ { "key": "scope", "value": + "zones/us-east1-d" } + message (str): + [Output Only] A human-readable description of the warning + code. + + This field is a member of `oneof`_ ``_message``. + """ + class Code(proto.Enum): + r"""[Output Only] A warning code, if applicable. For example, Compute + Engine returns NO_RESULTS_ON_PAGE if there are no results in the + response. + + Values: + UNDEFINED_CODE (0): + A value indicating that the enum field is not + set. + CLEANUP_FAILED (150308440): + Warning about failed cleanup of transient + changes made by a failed operation. + DEPRECATED_RESOURCE_USED (391835586): + A link to a deprecated resource was created. + DEPRECATED_TYPE_USED (346526230): + When deploying and at least one of the + resources has a type marked as deprecated + DISK_SIZE_LARGER_THAN_IMAGE_SIZE (369442967): + The user created a boot disk that is larger + than image size. + EXPERIMENTAL_TYPE_USED (451954443): + When deploying and at least one of the + resources has a type marked as experimental + EXTERNAL_API_WARNING (175546307): + Warning that is present in an external api + call + FIELD_VALUE_OVERRIDEN (329669423): + Warning that value of a field has been + overridden. Deprecated unused field. + INJECTED_KERNELS_DEPRECATED (417377419): + The operation involved use of an injected + kernel, which is deprecated. + INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB (401542606): + A WEIGHTED_MAGLEV backend service is associated with a + health check that is not of type HTTP/HTTPS/HTTP2. + LARGE_DEPLOYMENT_WARNING (481440678): + When deploying a deployment with a + exceedingly large number of resources + LIST_OVERHEAD_QUOTA_EXCEED (47618117): + Resource can't be retrieved due to list + overhead quota exceed which captures the amount + of resources filtered out by user-defined list + filter. 
+ MISSING_TYPE_DEPENDENCY (344505463): + A resource depends on a missing type + NEXT_HOP_ADDRESS_NOT_ASSIGNED (324964999): + The route's nextHopIp address is not assigned + to an instance on the network. + NEXT_HOP_CANNOT_IP_FORWARD (383382887): + The route's next hop instance cannot ip + forward. + NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE (146748434): + The route's nextHopInstance URL refers to an + instance that does not have an ipv6 interface on + the same network as the route. + NEXT_HOP_INSTANCE_NOT_FOUND (464250446): + The route's nextHopInstance URL refers to an + instance that does not exist. + NEXT_HOP_INSTANCE_NOT_ON_NETWORK (243758146): + The route's nextHopInstance URL refers to an + instance that is not on the same network as the + route. + NEXT_HOP_NOT_RUNNING (417081265): + The route's next hop instance does not have a + status of RUNNING. + NOT_CRITICAL_ERROR (105763924): + Error which is not critical. We decided to + continue the process despite the mentioned + error. + NO_RESULTS_ON_PAGE (30036744): + No results are present on a particular list + page. + PARTIAL_SUCCESS (39966469): + Success is reported, but some results may be + missing due to errors + REQUIRED_TOS_AGREEMENT (3745539): + The user attempted to use a resource that + requires a TOS they have not accepted. + RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING (496728641): + Warning that a resource is in use. + RESOURCE_NOT_DELETED (168598460): + One or more of the resources set to + auto-delete could not be deleted because they + were in use. + SCHEMA_VALIDATION_IGNORED (275245642): + When a resource schema validation is ignored. + SINGLE_INSTANCE_PROPERTY_TEMPLATE (268305617): + Instance template used in instance group + manager is valid as such, but its application + does not make a lot of sense, because it allows + only single instance in instance group. 
+ UNDECLARED_PROPERTIES (390513439): + When undeclared properties in the schema are + present + UNREACHABLE (13328052): + A given scope cannot be reached. + """ + UNDEFINED_CODE = 0 + CLEANUP_FAILED = 150308440 + DEPRECATED_RESOURCE_USED = 391835586 + DEPRECATED_TYPE_USED = 346526230 + DISK_SIZE_LARGER_THAN_IMAGE_SIZE = 369442967 + EXPERIMENTAL_TYPE_USED = 451954443 + EXTERNAL_API_WARNING = 175546307 + FIELD_VALUE_OVERRIDEN = 329669423 + INJECTED_KERNELS_DEPRECATED = 417377419 + INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB = 401542606 + LARGE_DEPLOYMENT_WARNING = 481440678 + LIST_OVERHEAD_QUOTA_EXCEED = 47618117 + MISSING_TYPE_DEPENDENCY = 344505463 + NEXT_HOP_ADDRESS_NOT_ASSIGNED = 324964999 + NEXT_HOP_CANNOT_IP_FORWARD = 383382887 + NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE = 146748434 + NEXT_HOP_INSTANCE_NOT_FOUND = 464250446 + NEXT_HOP_INSTANCE_NOT_ON_NETWORK = 243758146 + NEXT_HOP_NOT_RUNNING = 417081265 + NOT_CRITICAL_ERROR = 105763924 + NO_RESULTS_ON_PAGE = 30036744 + PARTIAL_SUCCESS = 39966469 + REQUIRED_TOS_AGREEMENT = 3745539 + RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING = 496728641 + RESOURCE_NOT_DELETED = 168598460 + SCHEMA_VALIDATION_IGNORED = 275245642 + SINGLE_INSTANCE_PROPERTY_TEMPLATE = 268305617 + UNDECLARED_PROPERTIES = 390513439 + UNREACHABLE = 13328052 + + code: str = proto.Field( + proto.STRING, + number=3059181, + optional=True, + ) + data: MutableSequence['Data'] = proto.RepeatedField( + proto.MESSAGE, + number=3076010, + message='Data', + ) + message: str = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + + +class WeightedBackendService(proto.Message): + r"""In contrast to a single BackendService in HttpRouteAction to + which all matching traffic is directed to, + WeightedBackendService allows traffic to be split across + multiple backend services. The volume of traffic for each + backend service is proportional to the weight specified in each + WeightedBackendService + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_service (str): + The full or partial URL to the default + BackendService resource. Before forwarding the + request to backendService, the load balancer + applies any relevant headerActions specified as + part of this backendServiceWeight. + + This field is a member of `oneof`_ ``_backend_service``. + header_action (google.cloud.compute_v1.types.HttpHeaderAction): + Specifies changes to request and response + headers that need to take effect for the + selected backendService. headerAction specified + here take effect before headerAction in the + enclosing HttpRouteRule, PathMatcher and UrlMap. + headerAction is not supported for load balancers + that have their loadBalancingScheme set to + EXTERNAL. Not supported when the URL map is + bound to a target gRPC proxy that has + validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_header_action``. + weight (int): + Specifies the fraction of traffic sent to a + backend service, computed as weight / (sum of + all weightedBackendService weights in + routeAction) . The selection of a backend + service is determined only for new traffic. Once + a user's request has been directed to a backend + service, subsequent requests are sent to the + same backend service as determined by the + backend service's session affinity policy. The + value must be from 0 to 1000. + + This field is a member of `oneof`_ ``_weight``. + """ + + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + optional=True, + ) + header_action: 'HttpHeaderAction' = proto.Field( + proto.MESSAGE, + number=328077352, + optional=True, + message='HttpHeaderAction', + ) + weight: int = proto.Field( + proto.UINT32, + number=282149496, + optional=True, + ) + + +class XpnHostList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Project]): + [Output Only] A list of shared VPC host project URLs. + kind (str): + [Output Only] Type of resource. Always compute#xpnHostList + for lists of shared VPC hosts. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Project'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Project', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class XpnResourceId(proto.Message): + r"""Service resource (a.k.a service project) ID. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + The ID of the service resource. In the case + of projects, this field supports project id + (e.g., my-project-123) and project number (e.g. + 12345678). + + This field is a member of `oneof`_ ``_id``. + type_ (str): + The type of the service resource. + Check the Type enum for the list of possible + values. + + This field is a member of `oneof`_ ``_type``. + """ + class Type(proto.Enum): + r"""The type of the service resource. + + Values: + UNDEFINED_TYPE (0): + A value indicating that the enum field is not + set. + PROJECT (408671993): + No description available. + XPN_RESOURCE_TYPE_UNSPECIFIED (151607034): + No description available. + """ + UNDEFINED_TYPE = 0 + PROJECT = 408671993 + XPN_RESOURCE_TYPE_UNSPECIFIED = 151607034 + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + +class Zone(proto.Message): + r"""Represents a Zone resource. A zone is a deployment area. + These deployment areas are subsets of a region. For example the + zone us-east1-a is located in the us-east1 region. For more + information, read Regions and Zones. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + available_cpu_platforms (MutableSequence[str]): + [Output Only] Available cpu/platform selections for the + zone. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + deprecated (google.cloud.compute_v1.types.DeprecationStatus): + [Output Only] The deprecation status associated with this + zone. + + This field is a member of `oneof`_ ``_deprecated``. + description (str): + [Output Only] Textual description of the resource. 
+ + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always compute#zone for + zones. + + This field is a member of `oneof`_ ``_kind``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + region (str): + [Output Only] Full URL reference to the region which hosts + the zone. + + This field is a member of `oneof`_ ``_region``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + status (str): + [Output Only] Status of the zone, either UP or DOWN. Check + the Status enum for the list of possible values. + + This field is a member of `oneof`_ ``_status``. + supports_pzs (bool): + [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_supports_pzs``. + """ + class Status(proto.Enum): + r"""[Output Only] Status of the zone, either UP or DOWN. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + DOWN (2104482): + No description available. + UP (2715): + No description available. 
+ """ + UNDEFINED_STATUS = 0 + DOWN = 2104482 + UP = 2715 + + available_cpu_platforms: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=175536531, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + deprecated: 'DeprecationStatus' = proto.Field( + proto.MESSAGE, + number=515138995, + optional=True, + message='DeprecationStatus', + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + supports_pzs: bool = proto.Field( + proto.BOOL, + number=83983214, + optional=True, + ) + + +class ZoneList(proto.Message): + r"""Contains a list of zone resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.Zone]): + A list of Zone resources. + kind (str): + Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence['Zone'] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message='Zone', + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: 'Warning' = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message='Warning', + ) + + +class ZoneSetLabelsRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + label_fingerprint (str): + The fingerprint of the previous set of labels + for this resource, used to detect conflicts. The + fingerprint is initially generated by Compute + Engine and changes after every request to modify + or update labels. You must always provide an + up-to-date fingerprint hash in order to update + or change labels. Make a get() request to the + resource to get the latest fingerprint. + + This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + The labels to set for this resource. + """ + + label_fingerprint: str = proto.Field( + proto.STRING, + number=178124825, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=500195327, + ) + + +class ZoneSetPolicyRequest(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bindings (MutableSequence[google.cloud.compute_v1.types.Binding]): + Flatten Policy to create a backwacd + compatible wire-format. Deprecated. Use 'policy' + to specify bindings. + etag (str): + Flatten Policy to create a backward + compatible wire-format. Deprecated. Use 'policy' + to specify the etag. + + This field is a member of `oneof`_ ``_etag``. + policy (google.cloud.compute_v1.types.Policy): + REQUIRED: The complete policy to be applied + to the 'resource'. The size of the policy is + limited to a few 10s of KB. An empty policy is + in general a valid policy but certain services + (like Projects) might reject them. + + This field is a member of `oneof`_ ``_policy``. + """ + + bindings: MutableSequence['Binding'] = proto.RepeatedField( + proto.MESSAGE, + number=403251854, + message='Binding', + ) + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + policy: 'Policy' = proto.Field( + proto.MESSAGE, + number=91071794, + optional=True, + message='Policy', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini new file mode 100644 index 000000000..574c5aed3 --- /dev/null +++ b/owl-bot-staging/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py new file mode 100644 index 000000000..0f07f1025 --- /dev/null +++ b/owl-bot-staging/v1/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+ALL_PYTHON = [
+    "3.7",
+    "3.8",
+    "3.9",
+    "3.10",
+    "3.11",
+]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
+
+BLACK_VERSION = "black==22.3.0"
+BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
+DEFAULT_PYTHON_VERSION = "3.11"
+
+nox.options.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+    "blacken",
+    "lint",
+    "lint_setup_py",
+]
+
+@nox.session(python=ALL_PYTHON)
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/compute_v1/',
+        '--cov=tests/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_aggregated_list_sync.py new file mode 100644 index 000000000..c70a19eed --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_AcceleratorTypes_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.AcceleratorTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListAcceleratorTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_AcceleratorTypes_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_get_sync.py new file mode 100644 index 000000000..7e0d07b4d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_AcceleratorTypes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.AcceleratorTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetAcceleratorTypeRequest( + accelerator_type="accelerator_type_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_AcceleratorTypes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_list_sync.py new file mode 100644 index 000000000..01c3265e4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_accelerator_types_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_AcceleratorTypes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.AcceleratorTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListAcceleratorTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_AcceleratorTypes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_aggregated_list_sync.py new file mode 100644 index 000000000..9e92e7dd3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Addresses_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListAddressesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Addresses_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_delete_sync.py new file mode 100644 index 000000000..8a70c29a4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Addresses_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAddressRequest( + address="address_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Addresses_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_get_sync.py new file mode 100644 index 000000000..936330a31 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Addresses_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.GetAddressRequest( + address="address_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Addresses_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_insert_sync.py new file mode 100644 index 000000000..277bc6369 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Addresses_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.InsertAddressRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Addresses_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_list_sync.py new file mode 100644 index 000000000..d75e028dd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Addresses_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.ListAddressesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Addresses_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_move_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_move_sync.py new file mode 100644 index 000000000..a702fc65e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_move_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Move +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Addresses_Move_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_move(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.MoveAddressRequest( + address="address_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.move(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Addresses_Move_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_set_labels_sync.py new file mode 100644 index 000000000..0df1a820c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_addresses_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Addresses_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.AddressesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsAddressRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Addresses_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_aggregated_list_sync.py new file mode 100644 index 000000000..934dd1d57 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Autoscalers_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListAutoscalersRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Autoscalers_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_delete_sync.py new file mode 100644 index 000000000..975d98329 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Autoscalers_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAutoscalerRequest( + autoscaler="autoscaler_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Autoscalers_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_get_sync.py new file mode 100644 index 000000000..ce9f3b573 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Autoscalers_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.GetAutoscalerRequest( + autoscaler="autoscaler_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Autoscalers_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_insert_sync.py new file mode 100644 index 000000000..c1c68e9c1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Autoscalers_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.InsertAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Autoscalers_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_list_sync.py new file mode 100644 index 000000000..dc8722c37 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Autoscalers_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.ListAutoscalersRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Autoscalers_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_patch_sync.py new file mode 100644 index 000000000..59e158774 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Autoscalers_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.PatchAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Autoscalers_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_update_sync.py new file mode 100644 index 000000000..36a994573 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_autoscalers_update_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Autoscalers_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.AutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.UpdateAutoscalerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Autoscalers_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_add_signed_url_key_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_add_signed_url_key_sync.py new file mode 100644 index 000000000..3a398b24d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_add_signed_url_key_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddSignedUrlKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_AddSignedUrlKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_signed_url_key(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.AddSignedUrlKeyBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.add_signed_url_key(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_AddSignedUrlKey_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_delete_signed_url_key_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_delete_signed_url_key_sync.py new file mode 100644 index 000000000..597ac31e1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_delete_signed_url_key_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSignedUrlKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_DeleteSignedUrlKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete_signed_url_key(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSignedUrlKeyBackendBucketRequest( + backend_bucket="backend_bucket_value", + key_name="key_name_value", + project="project_value", + ) + + # Make the request + response = client.delete_signed_url_key(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_DeleteSignedUrlKey_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_delete_sync.py new file mode 100644 index 000000000..d3a1f02b7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_get_sync.py new file mode 100644 index 000000000..cc0ba9a2e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.GetBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_insert_sync.py new file mode 100644 index 000000000..f33b48297 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.InsertBackendBucketRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_list_sync.py new file mode 100644 index 000000000..ff3c639c9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.ListBackendBucketsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_BackendBuckets_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_patch_sync.py new file mode 100644 index 000000000..51683a925 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.PatchBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_set_edge_security_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_set_edge_security_policy_sync.py new file mode 100644 index 000000000..beaa86b89 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_set_edge_security_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetEdgeSecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_SetEdgeSecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_edge_security_policy(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.SetEdgeSecurityPolicyBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.set_edge_security_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_SetEdgeSecurityPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_update_sync.py new file mode 100644 index 000000000..e84f55600 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_buckets_update_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendBuckets_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_add_signed_url_key_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_add_signed_url_key_sync.py new file mode 100644 index 000000000..50f5c2377 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_add_signed_url_key_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddSignedUrlKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_AddSignedUrlKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_signed_url_key(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.AddSignedUrlKeyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.add_signed_url_key(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_AddSignedUrlKey_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_aggregated_list_sync.py new file mode 100644 index 000000000..19d0ad120 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListBackendServicesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_BackendServices_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_delete_signed_url_key_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_delete_signed_url_key_sync.py new file mode 100644 index 000000000..06719b6f7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_delete_signed_url_key_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSignedUrlKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_DeleteSignedUrlKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete_signed_url_key(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSignedUrlKeyBackendServiceRequest( + backend_service="backend_service_value", + key_name="key_name_value", + project="project_value", + ) + + # Make the request + response = client.delete_signed_url_key(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_DeleteSignedUrlKey_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_delete_sync.py new file mode 100644 index 000000000..73e80fecd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_health_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_health_sync.py new file mode 100644 index 000000000..c1a54fd34 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_health_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHealth +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_GetHealth_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_health(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_GetHealth_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_iam_policy_sync.py new file mode 100644 index 000000000..48656ab09 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyBackendServiceRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_sync.py new file mode 100644 index 000000000..8dba27c3d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_insert_sync.py new file mode 100644 index 000000000..1000c32d9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertBackendServiceRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_list_sync.py new file mode 100644 index 000000000..0ff7c34f2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListBackendServicesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_BackendServices_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_patch_sync.py new file mode 100644 index 000000000..d14bc2248 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_edge_security_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_edge_security_policy_sync.py new file mode 100644 index 000000000..ebff51e8c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_edge_security_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetEdgeSecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_SetEdgeSecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_edge_security_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetEdgeSecurityPolicyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.set_edge_security_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_SetEdgeSecurityPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_iam_policy_sync.py new file mode 100644 index 000000000..ef93f8b6f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyBackendServiceRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_security_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_security_policy_sync.py new file mode 100644 index 000000000..442ca564d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_set_security_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSecurityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_SetSecurityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_security_policy(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetSecurityPolicyBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.set_security_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_SetSecurityPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_update_sync.py new file mode 100644 index 000000000..0d13b13cd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_backend_services_update_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendServices_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.BackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_BackendServices_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_aggregated_list_sync.py new file mode 100644 index 000000000..b50be6113 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_DiskTypes_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.DiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListDiskTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_DiskTypes_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_get_sync.py new file mode 100644 index 000000000..4026ba875 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_DiskTypes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.DiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetDiskTypeRequest( + disk_type="disk_type_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_DiskTypes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_list_sync.py new file mode 100644 index 000000000..c95d554a8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disk_types_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_DiskTypes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.DiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListDiskTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_DiskTypes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_add_resource_policies_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_add_resource_policies_sync.py new file mode 100644 index 000000000..9158e288d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_add_resource_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddResourcePolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_AddResourcePolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_resource_policies(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.AddResourcePoliciesDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_resource_policies(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_AddResourcePolicies_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_aggregated_list_sync.py new file mode 100644 index 000000000..841eadd6a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListDisksRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Disks_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_bulk_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_bulk_insert_sync.py new file mode 100644 index 000000000..148e37bd6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_bulk_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkInsert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_BulkInsert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_bulk_insert(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertDiskRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_BulkInsert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_create_snapshot_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_create_snapshot_sync.py new file mode 100644 index 000000000..b1a159706 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_create_snapshot_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_CreateSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_create_snapshot(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.CreateSnapshotDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_CreateSnapshot_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_delete_sync.py new file mode 100644 index 000000000..fb29f3c9c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_get_iam_policy_sync.py new file mode 100644 index 000000000..7dc5173e9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_get_sync.py new file mode 100644 index 000000000..c53d3d92a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.GetDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_insert_sync.py new file mode 100644 index 000000000..872874245 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.InsertDiskRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_list_sync.py new file mode 100644 index 000000000..a9462ee16 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.ListDisksRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Disks_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_remove_resource_policies_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_remove_resource_policies_sync.py new file mode 100644 index 000000000..d9ed27ab5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_remove_resource_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveResourcePolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_RemoveResourcePolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_resource_policies(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_RemoveResourcePolicies_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_resize_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_resize_sync.py new file mode 100644 index 000000000..e1c396cb3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_resize_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Resize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_Resize_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_resize(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.ResizeDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_Resize_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_set_iam_policy_sync.py new file mode 100644 index 000000000..d3b53e7fd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_set_labels_sync.py new file mode 100644 index 000000000..a15681f62 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_start_async_replication_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_start_async_replication_sync.py new file mode 100644 index 000000000..8eca33b10 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_start_async_replication_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartAsyncReplication +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_StartAsyncReplication_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_start_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StartAsyncReplicationDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.start_async_replication(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_StartAsyncReplication_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_stop_async_replication_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_stop_async_replication_sync.py new file mode 100644 index 000000000..522588d99 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_stop_async_replication_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopAsyncReplication +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_StopAsyncReplication_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_stop_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StopAsyncReplicationDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.stop_async_replication(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_StopAsyncReplication_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_stop_group_async_replication_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_stop_group_async_replication_sync.py new file mode 100644 index 000000000..1131a21fd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_stop_group_async_replication_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopGroupAsyncReplication +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_StopGroupAsyncReplication_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_stop_group_async_replication(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.StopGroupAsyncReplicationDiskRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.stop_group_async_replication(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_StopGroupAsyncReplication_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_test_iam_permissions_sync.py new file mode 100644 index 000000000..44926c80d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsDiskRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_update_sync.py new file mode 100644 index 000000000..a79ce530e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_disks_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Disks_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_delete_sync.py new file mode 100644 index 000000000..a1ac52452 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ExternalVpnGateways_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteExternalVpnGatewayRequest( + external_vpn_gateway="external_vpn_gateway_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ExternalVpnGateways_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_get_sync.py new file mode 100644 index 000000000..5604ca609 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ExternalVpnGateways_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.GetExternalVpnGatewayRequest( + external_vpn_gateway="external_vpn_gateway_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ExternalVpnGateways_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_insert_sync.py new file mode 100644 index 000000000..4999ca10f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ExternalVpnGateways_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertExternalVpnGatewayRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ExternalVpnGateways_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_list_sync.py new file mode 100644 index 000000000..63db110e9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ExternalVpnGateways_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.ListExternalVpnGatewaysRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_ExternalVpnGateways_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_set_labels_sync.py new file mode 100644 index 000000000..b56c24fb2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_set_labels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ExternalVpnGateways_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsExternalVpnGatewayRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ExternalVpnGateways_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_test_iam_permissions_sync.py new file mode 100644 index 000000000..cb65f1638 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_external_vpn_gateways_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ExternalVpnGateways_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ExternalVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsExternalVpnGatewayRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ExternalVpnGateways_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_add_association_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_add_association_sync.py new file mode 100644 index 000000000..b2f1d9053 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_add_association_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_AddAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_association(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddAssociationFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.add_association(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_AddAssociation_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_add_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_add_rule_sync.py new file mode 100644 index 000000000..98d538c21 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_add_rule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_AddRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_rule(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_AddRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_clone_rules_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_clone_rules_sync.py new file mode 100644 index 000000000..0081a5fcd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_clone_rules_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CloneRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_CloneRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_clone_rules(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.CloneRulesFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.clone_rules(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_CloneRules_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_delete_sync.py new file mode 100644 index 000000000..fb088723a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_delete_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_association_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_association_sync.py new file mode 100644 index 000000000..50b12c383 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_association_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_GetAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_association(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetAssociationFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.get_association(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_GetAssociation_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_iam_policy_sync.py new file mode 100644 index 000000000..2bbd42422 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_iam_policy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyFirewallPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_rule_sync.py new file mode 100644 index 000000000..9dc7cfd42 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_rule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_GetRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_rule(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_GetRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_sync.py new file mode 100644 index 000000000..392f1620c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_get_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_insert_sync.py new file mode 100644 index 000000000..f43f92e5c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertFirewallPolicyRequest( + parent_id="parent_id_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_list_associations_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_list_associations_sync.py new file mode 100644 index 000000000..051e8ad27 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_list_associations_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_ListAssociations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_associations(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListAssociationsFirewallPolicyRequest( + ) + + # Make the request + response = client.list_associations(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_FirewallPolicies_ListAssociations_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_list_sync.py new file mode 100644 index 000000000..51a2e6d66 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_list_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.FirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListFirewallPoliciesRequest( + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_FirewallPolicies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_move_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_move_sync.py new file mode 100644 index 000000000..e6f05ce6d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_firewall_policies_move_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Move +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_Move_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
# [START compute_v1_generated_FirewallPolicies_Move_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_move():
    """Move a firewall policy under a new parent (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallPoliciesClient()

    # Both fields below are placeholders — replace with real values.
    request = compute_v1.MoveFirewallPolicyRequest(
        firewall_policy="firewall_policy_value",
        parent_id="parent_id_value",
    )

    # Issue the request and show what the API returned.
    print(client.move(request=request))

# [END compute_v1_generated_FirewallPolicies_Move_sync]
# [START compute_v1_generated_FirewallPolicies_PatchRule_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_patch_rule():
    """Patch a rule on an existing firewall policy (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallPoliciesClient()

    # Placeholder policy name — replace with a real value.
    request = compute_v1.PatchRuleFirewallPolicyRequest(
        firewall_policy="firewall_policy_value",
    )

    # Issue the request and show what the API returned.
    print(client.patch_rule(request=request))

# [END compute_v1_generated_FirewallPolicies_PatchRule_sync]
# [START compute_v1_generated_FirewallPolicies_Patch_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_patch():
    """Patch an existing firewall policy (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallPoliciesClient()

    # Placeholder policy name — replace with a real value.
    request = compute_v1.PatchFirewallPolicyRequest(
        firewall_policy="firewall_policy_value",
    )

    # Issue the request and show what the API returned.
    print(client.patch(request=request))

# [END compute_v1_generated_FirewallPolicies_Patch_sync]
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_RemoveAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
# [START compute_v1_generated_FirewallPolicies_RemoveAssociation_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_remove_association():
    """Remove an association from a firewall policy (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallPoliciesClient()

    # Placeholder policy name — replace with a real value.
    request = compute_v1.RemoveAssociationFirewallPolicyRequest(
        firewall_policy="firewall_policy_value",
    )

    # Issue the request and show what the API returned.
    print(client.remove_association(request=request))

# [END compute_v1_generated_FirewallPolicies_RemoveAssociation_sync]
# [START compute_v1_generated_FirewallPolicies_RemoveRule_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_remove_rule():
    """Remove a rule from a firewall policy (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallPoliciesClient()

    # Placeholder policy name — replace with a real value.
    request = compute_v1.RemoveRuleFirewallPolicyRequest(
        firewall_policy="firewall_policy_value",
    )

    # Issue the request and show what the API returned.
    print(client.remove_rule(request=request))

# [END compute_v1_generated_FirewallPolicies_RemoveRule_sync]
# [START compute_v1_generated_FirewallPolicies_SetIamPolicy_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_set_iam_policy():
    """Set the IAM policy on a firewall-policy resource (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallPoliciesClient()

    # Placeholder resource name — replace with a real value.
    request = compute_v1.SetIamPolicyFirewallPolicyRequest(
        resource="resource_value",
    )

    # Issue the request and show what the API returned.
    print(client.set_iam_policy(request=request))

# [END compute_v1_generated_FirewallPolicies_SetIamPolicy_sync]
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_FirewallPolicies_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
# [START compute_v1_generated_FirewallPolicies_TestIamPermissions_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_test_iam_permissions():
    """Check which permissions the caller holds on a resource (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallPoliciesClient()

    # Placeholder resource name — replace with a real value.
    request = compute_v1.TestIamPermissionsFirewallPolicyRequest(
        resource="resource_value",
    )

    # Issue the request and show what the API returned.
    print(client.test_iam_permissions(request=request))

# [END compute_v1_generated_FirewallPolicies_TestIamPermissions_sync]
# [START compute_v1_generated_Firewalls_Delete_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_delete():
    """Delete a firewall resource in a project (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallsClient()

    # Both fields below are placeholders — replace with real values.
    request = compute_v1.DeleteFirewallRequest(
        firewall="firewall_value",
        project="project_value",
    )

    # Issue the request and show what the API returned.
    print(client.delete(request=request))

# [END compute_v1_generated_Firewalls_Delete_sync]
# [START compute_v1_generated_Firewalls_Get_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_get():
    """Fetch a single firewall resource from a project (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallsClient()

    # Both fields below are placeholders — replace with real values.
    request = compute_v1.GetFirewallRequest(
        firewall="firewall_value",
        project="project_value",
    )

    # Issue the request and show what the API returned.
    print(client.get(request=request))

# [END compute_v1_generated_Firewalls_Get_sync]
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Firewalls_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
# [START compute_v1_generated_Firewalls_Insert_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_insert():
    """Create a firewall resource in a project (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallsClient()

    # Placeholder project id — replace with a real value.
    request = compute_v1.InsertFirewallRequest(
        project="project_value",
    )

    # Issue the request and show what the API returned.
    print(client.insert(request=request))

# [END compute_v1_generated_Firewalls_Insert_sync]
# [START compute_v1_generated_Firewalls_List_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_list():
    """List firewall resources in a project (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallsClient()

    # Placeholder project id — replace with a real value.
    request = compute_v1.ListFirewallsRequest(
        project="project_value",
    )

    # list() returns a pager that transparently fetches successive pages.
    for response in client.list(request=request):
        print(response)

# [END compute_v1_generated_Firewalls_List_sync]
# [START compute_v1_generated_Firewalls_Patch_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_patch():
    """Patch a firewall resource in a project (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallsClient()

    # Both fields below are placeholders — replace with real values.
    request = compute_v1.PatchFirewallRequest(
        firewall="firewall_value",
        project="project_value",
    )

    # Issue the request and show what the API returned.
    print(client.patch(request=request))

# [END compute_v1_generated_Firewalls_Patch_sync]
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Firewalls_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
# [START compute_v1_generated_Firewalls_Update_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_update():
    """Update (replace) a firewall resource in a project (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.FirewallsClient()

    # Both fields below are placeholders — replace with real values.
    request = compute_v1.UpdateFirewallRequest(
        firewall="firewall_value",
        project="project_value",
    )

    # Issue the request and show what the API returned.
    print(client.update(request=request))

# [END compute_v1_generated_Firewalls_Update_sync]
# [START compute_v1_generated_ForwardingRules_AggregatedList_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_aggregated_list():
    """List forwarding rules across all scopes of a project (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.ForwardingRulesClient()

    # Placeholder project id — replace with a real value.
    request = compute_v1.AggregatedListForwardingRulesRequest(
        project="project_value",
    )

    # aggregated_list() returns a pager; iterate to walk every entry.
    for response in client.aggregated_list(request=request):
        print(response)

# [END compute_v1_generated_ForwardingRules_AggregatedList_sync]
# [START compute_v1_generated_ForwardingRules_Delete_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import compute_v1


def sample_delete():
    """Delete a regional forwarding rule (sync sample)."""
    # Instantiate the client; picks up application-default credentials.
    client = compute_v1.ForwardingRulesClient()

    # All three fields below are placeholders — replace with real values.
    request = compute_v1.DeleteForwardingRuleRequest(
        forwarding_rule="forwarding_rule_value",
        project="project_value",
        region="region_value",
    )

    # Issue the request and show what the API returned.
    print(client.delete(request=request))

# [END compute_v1_generated_ForwardingRules_Delete_sync]
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ForwardingRules_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.GetForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ForwardingRules_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_insert_sync.py new file mode 100644 index 000000000..849b8004f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ForwardingRules_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.InsertForwardingRuleRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ForwardingRules_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_list_sync.py new file mode 100644 index 000000000..56f18b9e3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ForwardingRules_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.ListForwardingRulesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_ForwardingRules_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_patch_sync.py new file mode 100644 index 000000000..99f5a97a7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ForwardingRules_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.PatchForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ForwardingRules_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_set_labels_sync.py new file mode 100644 index 000000000..ec7277483 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ForwardingRules_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsForwardingRuleRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ForwardingRules_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_set_target_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_set_target_sync.py new file mode 100644 index 000000000..53cfb21ed --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_forwarding_rules_set_target_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetTarget +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ForwardingRules_SetTarget_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_target(): + # Create a client + client = compute_v1.ForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_target(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ForwardingRules_SetTarget_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_delete_sync.py new file mode 100644 index 000000000..1998fb83a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalAddresses_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalAddressRequest( + address="address_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalAddresses_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_get_sync.py new file mode 100644 index 000000000..7cabf6edb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalAddresses_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalAddressRequest( + address="address_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalAddresses_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_insert_sync.py new file mode 100644 index 000000000..9aeb19aad --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalAddresses_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalAddressRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalAddresses_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_list_sync.py new file mode 100644 index 000000000..2f28e9a3e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalAddresses_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalAddressesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_GlobalAddresses_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_move_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_move_sync.py new file mode 100644 index 000000000..7b273a4ff --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_move_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Move +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalAddresses_Move_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_move(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.MoveGlobalAddressRequest( + address="address_value", + project="project_value", + ) + + # Make the request + response = client.move(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalAddresses_Move_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_set_labels_sync.py new file mode 100644 index 000000000..7d85a23ed --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_addresses_set_labels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalAddresses_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.GlobalAddressesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsGlobalAddressRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalAddresses_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_delete_sync.py new file mode 100644 index 000000000..705a1a4ee --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalForwardingRules_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalForwardingRules_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_get_sync.py new file mode 100644 index 000000000..67d5ab41e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalForwardingRules_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalForwardingRules_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_insert_sync.py new file mode 100644 index 000000000..dea4b3c07 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalForwardingRules_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalForwardingRuleRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalForwardingRules_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_list_sync.py new file mode 100644 index 000000000..7ac4fed21 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalForwardingRules_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalForwardingRulesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_GlobalForwardingRules_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_patch_sync.py new file mode 100644 index 000000000..7772d8902 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalForwardingRules_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.PatchGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalForwardingRules_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_set_labels_sync.py new file mode 100644 index 000000000..2bd5a16e5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_set_labels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalForwardingRules_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsGlobalForwardingRuleRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalForwardingRules_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_set_target_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_set_target_sync.py new file mode 100644 index 000000000..9af0b07fe --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_forwarding_rules_set_target_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetTarget +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalForwardingRules_SetTarget_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_target(): + # Create a client + client = compute_v1.GlobalForwardingRulesClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetGlobalForwardingRuleRequest( + forwarding_rule="forwarding_rule_value", + project="project_value", + ) + + # Make the request + response = client.set_target(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalForwardingRules_SetTarget_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_attach_network_endpoints_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_attach_network_endpoints_sync.py new file mode 100644 index 000000000..821d28974 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_attach_network_endpoints_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AttachNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalNetworkEndpointGroups_AttachNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalNetworkEndpointGroups_AttachNetworkEndpoints_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_delete_sync.py new file mode 100644 index 000000000..914404ef2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalNetworkEndpointGroups_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalNetworkEndpointGroups_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_detach_network_endpoints_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_detach_network_endpoints_sync.py new file mode 100644 index 000000000..7980a7e50 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_detach_network_endpoints_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetachNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalNetworkEndpointGroups_DetachNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_detach_network_endpoints(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.detach_network_endpoints(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalNetworkEndpointGroups_DetachNetworkEndpoints_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_get_sync.py new file mode 100644 index 000000000..6c57a722f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalNetworkEndpointGroups_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalNetworkEndpointGroups_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_insert_sync.py new file mode 100644 index 000000000..789bd150d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalNetworkEndpointGroups_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalNetworkEndpointGroupRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalNetworkEndpointGroups_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_list_network_endpoints_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_list_network_endpoints_sync.py new file mode 100644 index 000000000..10d036c17 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_list_network_endpoints_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalNetworkEndpointGroups_ListNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_network_endpoints(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + ) + + # Make the request + page_result = client.list_network_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_GlobalNetworkEndpointGroups_ListNetworkEndpoints_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_list_sync.py new file mode 100644 index 000000000..3348581e4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_network_endpoint_groups_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalNetworkEndpointGroups_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.GlobalNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalNetworkEndpointGroupsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_GlobalNetworkEndpointGroups_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_aggregated_list_sync.py new file mode 100644 index 000000000..3a5a4a9f0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalOperations_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListGlobalOperationsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_GlobalOperations_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_delete_sync.py new file mode 100644 index 000000000..bcc624f5f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalOperations_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalOperationRequest( + operation="operation_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalOperations_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_get_sync.py new file mode 100644 index 000000000..b806bb653 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalOperations_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalOperationRequest( + operation="operation_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalOperations_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_list_sync.py new file mode 100644 index 000000000..16bb2026c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalOperations_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalOperationsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_GlobalOperations_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_wait_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_wait_sync.py new file mode 100644 index 000000000..7de5dcc07 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_operations_wait_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Wait +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalOperations_Wait_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_wait(): + # Create a client + client = compute_v1.GlobalOperationsClient() + + # Initialize request argument(s) + request = compute_v1.WaitGlobalOperationRequest( + operation="operation_value", + project="project_value", + ) + + # Make the request + response = client.wait(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalOperations_Wait_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_delete_sync.py new file mode 100644 index 000000000..dc4493b98 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_delete_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalOrganizationOperations_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.GlobalOrganizationOperationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalOrganizationOperationRequest( + operation="operation_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalOrganizationOperations_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_get_sync.py new file mode 100644 index 000000000..bfc261cf3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_get_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalOrganizationOperations_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.GlobalOrganizationOperationsClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalOrganizationOperationRequest( + operation="operation_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalOrganizationOperations_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_list_sync.py new file mode 100644 index 000000000..b984d36d1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_organization_operations_list_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalOrganizationOperations_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.GlobalOrganizationOperationsClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalOrganizationOperationsRequest( + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_GlobalOrganizationOperations_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_delete_sync.py new file mode 100644 index 000000000..1f7c87d87 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalPublicDelegatedPrefixes_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.GlobalPublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteGlobalPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalPublicDelegatedPrefixes_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_get_sync.py new file mode 100644 index 000000000..4618ea779 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalPublicDelegatedPrefixes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.GlobalPublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.GetGlobalPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalPublicDelegatedPrefixes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_insert_sync.py new file mode 100644 index 000000000..713daf422 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalPublicDelegatedPrefixes_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.GlobalPublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.InsertGlobalPublicDelegatedPrefixeRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalPublicDelegatedPrefixes_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_list_sync.py new file mode 100644 index 000000000..18e5d2991 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalPublicDelegatedPrefixes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.GlobalPublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.ListGlobalPublicDelegatedPrefixesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_GlobalPublicDelegatedPrefixes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_patch_sync.py new file mode 100644 index 000000000..e891fa4d4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_global_public_delegated_prefixes_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_GlobalPublicDelegatedPrefixes_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.GlobalPublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchGlobalPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_GlobalPublicDelegatedPrefixes_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_aggregated_list_sync.py new file mode 100644 index 000000000..86508ab42 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_HealthChecks_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListHealthChecksRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_HealthChecks_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_delete_sync.py new file mode 100644 index 000000000..c9761b513 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_HealthChecks_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_HealthChecks_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_get_sync.py new file mode 100644 index 000000000..d89271b22 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_HealthChecks_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_HealthChecks_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_insert_sync.py new file mode 100644 index 000000000..5c87c456d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_HealthChecks_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.InsertHealthCheckRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_HealthChecks_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_list_sync.py new file mode 100644 index 000000000..e0c4fbc70 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_HealthChecks_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.ListHealthChecksRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_HealthChecks_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_patch_sync.py new file mode 100644 index 000000000..112833f51 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_HealthChecks_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.PatchHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_HealthChecks_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_update_sync.py new file mode 100644 index 000000000..49493d1d0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_health_checks_update_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_HealthChecks_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.HealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateHealthCheckRequest( + health_check="health_check_value", + project="project_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_HealthChecks_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_image_family_views_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_image_family_views_get_sync.py new file mode 100644 index 000000000..827e7d8d6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_image_family_views_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ImageFamilyViews_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ImageFamilyViewsClient() + + # Initialize request argument(s) + request = compute_v1.GetImageFamilyViewRequest( + family="family_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ImageFamilyViews_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_delete_sync.py new file mode 100644 index 000000000..ccbe6894f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_deprecate_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_deprecate_sync.py new file mode 100644 index 000000000..40521e88c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_deprecate_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Deprecate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_Deprecate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_deprecate(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeprecateImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.deprecate(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_Deprecate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_from_family_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_from_family_sync.py new file mode 100644 index 000000000..e231789b0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_from_family_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFromFamily +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_GetFromFamily_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_from_family(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetFromFamilyImageRequest( + family="family_value", + project="project_value", + ) + + # Make the request + response = client.get_from_family(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_GetFromFamily_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_iam_policy_sync.py new file mode 100644 index 000000000..f536741ab --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_sync.py new file mode 100644 index 000000000..5ecb6f021 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_insert_sync.py new file mode 100644 index 000000000..c0fea396f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.InsertImageRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_list_sync.py new file mode 100644 index 000000000..68c05363c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.ListImagesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Images_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_patch_sync.py new file mode 100644 index 000000000..07674804e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.PatchImageRequest( + image="image_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_set_iam_policy_sync.py new file mode 100644 index 000000000..830c6db54 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_set_labels_sync.py new file mode 100644 index 000000000..b36715aa0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_set_labels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_test_iam_permissions_sync.py new file mode 100644 index 000000000..3bdc01aff --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_images_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Images_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ImagesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Images_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_abandon_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_abandon_instances_sync.py new file mode 100644 index 000000000..4e4469347 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_abandon_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AbandonInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_AbandonInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_abandon_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.AbandonInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.abandon_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_AbandonInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_aggregated_list_sync.py new file mode 100644 index 000000000..26bb3befe --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInstanceGroupManagersRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py new file mode 100644 index 000000000..0063e242c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyUpdatesToInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_ApplyUpdatesToInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_apply_updates_to_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ApplyUpdatesToInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.apply_updates_to_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_ApplyUpdatesToInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_create_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_create_instances_sync.py new file mode 100644 index 000000000..a9f72f439 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_create_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_CreateInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_create_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.CreateInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.create_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_CreateInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_instances_sync.py new file mode 100644 index 000000000..27a229338 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_DeleteInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_DeleteInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py new file mode 100644 index 000000000..98bd9547f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePerInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_DeletePerInstanceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeletePerInstanceConfigsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete_per_instance_configs(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_DeletePerInstanceConfigs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_sync.py new file mode 100644 index 000000000..2f320c48a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_get_sync.py new file mode 100644 index 000000000..e6cfb58b2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_insert_sync.py new file mode 100644 index 000000000..9d1af04ed --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceGroupManagerRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_errors_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_errors_sync.py new file mode 100644 index 000000000..70b2572f9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_errors_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListErrors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_ListErrors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_errors(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListErrorsInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_errors(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_ListErrors_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_managed_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_managed_instances_sync.py new file mode 100644 index 000000000..73cf47611 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_managed_instances_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListManagedInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_ListManagedInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_managed_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListManagedInstancesInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_managed_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_ListManagedInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py new file mode 100644 index 000000000..5c9acb273 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListPerInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_ListPerInstanceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListPerInstanceConfigsInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_per_instance_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_ListPerInstanceConfigs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_sync.py new file mode 100644 index 000000000..93c9aab73 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListInstanceGroupManagersRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py new file mode 100644 index 000000000..2ea5abb16 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PatchPerInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_PatchPerInstanceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchPerInstanceConfigsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch_per_instance_configs(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_PatchPerInstanceConfigs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_patch_sync.py new file mode 100644 index 000000000..e870db90c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_recreate_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_recreate_instances_sync.py new file mode 100644 index 000000000..ba1440f77 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_recreate_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RecreateInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_RecreateInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_recreate_instances(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.RecreateInstancesInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.recreate_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_RecreateInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_resize_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_resize_sync.py new file mode 100644 index 000000000..c90605817 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_resize_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Resize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_Resize_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_resize(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ResizeInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + size=443, + zone="zone_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_Resize_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_set_instance_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_set_instance_template_sync.py new file mode 100644 index 000000000..83ba3ed91 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_set_instance_template_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetInstanceTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_SetInstanceTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_instance_template(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetInstanceTemplateInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_instance_template(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_SetInstanceTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_set_target_pools_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_set_target_pools_sync.py new file mode 100644 index 000000000..110126edb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_set_target_pools_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetTargetPools +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_SetTargetPools_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_target_pools(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetPoolsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_target_pools(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_SetTargetPools_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py new file mode 100644 index 000000000..c5074ce31 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePerInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagers_UpdatePerInstanceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_per_instance_configs(): + # Create a client + client = compute_v1.InstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.UpdatePerInstanceConfigsInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_per_instance_configs(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagers_UpdatePerInstanceConfigs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_add_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_add_instances_sync.py new file mode 100644 index 000000000..3053b0eab --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_add_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_AddInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_instances(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AddInstancesInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroups_AddInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_aggregated_list_sync.py new file mode 100644 index 000000000..07d666050 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInstanceGroupsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroups_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_delete_sync.py new file mode 100644 index 000000000..fd098eb23 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroups_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_get_sync.py new file mode 100644 index 000000000..494eb64ec --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroups_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_insert_sync.py new file mode 100644 index 000000000..82b4fbb1e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceGroupRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroups_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_list_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_list_instances_sync.py new file mode 100644 index 000000000..16c773d72 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_list_instances_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_instances(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstancesInstanceGroupsRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroups_ListInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_list_sync.py new file mode 100644 index 000000000..13e1bf4ea --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstanceGroupsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroups_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_remove_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_remove_instances_sync.py new file mode 100644 index 000000000..ff6a41049 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_remove_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_RemoveInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_instances(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveInstancesInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroups_RemoveInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_set_named_ports_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_set_named_ports_sync.py new file mode 100644 index 000000000..61c5677fd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_groups_set_named_ports_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetNamedPorts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroups_SetNamedPorts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_named_ports(): + # Create a client + client = compute_v1.InstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetNamedPortsInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_named_ports(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroups_SetNamedPorts_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_aggregated_list_sync.py new file mode 100644 index 000000000..0b030ba12 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceTemplates_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInstanceTemplatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceTemplates_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_delete_sync.py new file mode 100644 index 000000000..98c6d46f9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceTemplates_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceTemplates_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_get_iam_policy_sync.py new file mode 100644 index 000000000..03657b103 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceTemplates_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyInstanceTemplateRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceTemplates_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_get_sync.py new file mode 100644 index 000000000..1aa84ec41 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceTemplates_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceTemplates_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_insert_sync.py new file mode 100644 index 000000000..6feb4861b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceTemplates_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceTemplateRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceTemplates_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_list_sync.py new file mode 100644 index 000000000..88a7e5352 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceTemplates_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.ListInstanceTemplatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceTemplates_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_set_iam_policy_sync.py new file mode 100644 index 000000000..5a979f512 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceTemplates_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyInstanceTemplateRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceTemplates_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_test_iam_permissions_sync.py new file mode 100644 index 000000000..ac8239b1c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instance_templates_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceTemplates_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.InstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsInstanceTemplateRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceTemplates_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_add_access_config_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_add_access_config_sync.py new file mode 100644 index 000000000..d16d8318a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_add_access_config_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddAccessConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_AddAccessConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AddAccessConfigInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_access_config(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_AddAccessConfig_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_add_resource_policies_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_add_resource_policies_sync.py new file mode 100644 index 000000000..488a4d59f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_add_resource_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddResourcePolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_AddResourcePolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_resource_policies(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AddResourcePoliciesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_resource_policies(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_AddResourcePolicies_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_aggregated_list_sync.py new file mode 100644 index 000000000..ff43ce7c8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInstancesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Instances_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_attach_disk_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_attach_disk_sync.py new file mode 100644 index 000000000..666b0abb6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_attach_disk_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AttachDisk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_AttachDisk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_attach_disk(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.AttachDiskInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.attach_disk(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_AttachDisk_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_bulk_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_bulk_insert_sync.py new file mode 100644 index 000000000..5c6f222f2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_bulk_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkInsert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_BulkInsert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_bulk_insert(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertInstanceRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_BulkInsert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_delete_access_config_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_delete_access_config_sync.py new file mode 100644 index 000000000..aa929ebc8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_delete_access_config_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAccessConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_DeleteAccessConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteAccessConfigInstanceRequest( + access_config="access_config_value", + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete_access_config(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_DeleteAccessConfig_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_delete_sync.py new file mode 100644 index 000000000..4983348c1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_detach_disk_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_detach_disk_sync.py new file mode 100644 index 000000000..788db89bb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_detach_disk_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetachDisk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_DetachDisk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_detach_disk(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.DetachDiskInstanceRequest( + device_name="device_name_value", + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.detach_disk(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_DetachDisk_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_effective_firewalls_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_effective_firewalls_sync.py new file mode 100644 index 000000000..649a00d9f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_effective_firewalls_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEffectiveFirewalls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_GetEffectiveFirewalls_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_effective_firewalls(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetEffectiveFirewallsInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_effective_firewalls(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_GetEffectiveFirewalls_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_guest_attributes_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_guest_attributes_sync.py new file mode 100644 index 000000000..ed5ddf5a0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_guest_attributes_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGuestAttributes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_GetGuestAttributes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_guest_attributes(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetGuestAttributesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_guest_attributes(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_GetGuestAttributes_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_iam_policy_sync.py new file mode 100644 index 000000000..083c4c177 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_screenshot_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_screenshot_sync.py new file mode 100644 index 000000000..ab99681e2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_screenshot_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetScreenshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_GetScreenshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_screenshot(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetScreenshotInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_screenshot(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_GetScreenshot_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_serial_port_output_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_serial_port_output_sync.py new file mode 100644 index 000000000..2a3b045ac --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_serial_port_output_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSerialPortOutput +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_GetSerialPortOutput_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_serial_port_output(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetSerialPortOutputInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_serial_port_output(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_GetSerialPortOutput_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_shielded_instance_identity_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_shielded_instance_identity_sync.py new file mode 100644 index 000000000..09d5b72db --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_shielded_instance_identity_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetShieldedInstanceIdentity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_GetShieldedInstanceIdentity_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_shielded_instance_identity(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetShieldedInstanceIdentityInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get_shielded_instance_identity(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_GetShieldedInstanceIdentity_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_sync.py new file mode 100644 index 000000000..a7e772ed8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_insert_sync.py new file mode 100644 index 000000000..fcc75c383 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_list_referrers_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_list_referrers_sync.py new file mode 100644 index 000000000..b5a568350 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_list_referrers_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListReferrers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_ListReferrers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_referrers(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.ListReferrersInstancesRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_referrers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Instances_ListReferrers_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_list_sync.py new file mode 100644 index 000000000..efcf90817 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.ListInstancesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Instances_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_remove_resource_policies_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_remove_resource_policies_sync.py new file mode 100644 index 000000000..90356dfb2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_remove_resource_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveResourcePolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_RemoveResourcePolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_resource_policies(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_RemoveResourcePolicies_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_reset_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_reset_sync.py new file mode 100644 index 000000000..85859d14d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_reset_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Reset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Reset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_reset(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.ResetInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.reset(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Reset_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_resume_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_resume_sync.py new file mode 100644 index 000000000..5a14e93be --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_resume_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Resume +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Resume_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_resume(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.ResumeInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.resume(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Resume_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_send_diagnostic_interrupt_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_send_diagnostic_interrupt_sync.py new file mode 100644 index 000000000..7f7a25b5c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_send_diagnostic_interrupt_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SendDiagnosticInterrupt +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SendDiagnosticInterrupt_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_send_diagnostic_interrupt(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SendDiagnosticInterruptInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.send_diagnostic_interrupt(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SendDiagnosticInterrupt_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_deletion_protection_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_deletion_protection_sync.py new file mode 100644 index 000000000..fbf9921d4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_deletion_protection_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetDeletionProtection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetDeletionProtection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_deletion_protection(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetDeletionProtectionInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_deletion_protection(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetDeletionProtection_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_disk_auto_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_disk_auto_delete_sync.py new file mode 100644 index 000000000..bf2cbca32 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_disk_auto_delete_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetDiskAutoDelete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetDiskAutoDelete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_disk_auto_delete(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetDiskAutoDeleteInstanceRequest( + auto_delete=True, + device_name="device_name_value", + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_disk_auto_delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetDiskAutoDelete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_iam_policy_sync.py new file mode 100644 index 000000000..334857f59 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_labels_sync.py new file mode 100644 index 000000000..6c45e69ae --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_machine_resources_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_machine_resources_sync.py new file mode 100644 index 000000000..90f119361 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_machine_resources_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetMachineResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetMachineResources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_machine_resources(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMachineResourcesInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_machine_resources(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetMachineResources_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_machine_type_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_machine_type_sync.py new file mode 100644 index 000000000..9b9034957 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_machine_type_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetMachineType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetMachineType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_machine_type(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMachineTypeInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_machine_type(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetMachineType_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_metadata_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_metadata_sync.py new file mode 100644 index 000000000..c97cd1c6f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_metadata_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_metadata(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMetadataInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_metadata(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetMetadata_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_min_cpu_platform_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_min_cpu_platform_sync.py new file mode 100644 index 000000000..0198d6c3f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_min_cpu_platform_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetMinCpuPlatform +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetMinCpuPlatform_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_min_cpu_platform(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetMinCpuPlatformInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_min_cpu_platform(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetMinCpuPlatform_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_name_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_name_sync.py new file mode 100644 index 000000000..e04cbc4ac --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_name_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetName +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetName_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_name(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetNameInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_name(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetName_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_scheduling_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_scheduling_sync.py new file mode 100644 index 000000000..1f797e0a8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_scheduling_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetScheduling +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetScheduling_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_scheduling(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetSchedulingInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_scheduling(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetScheduling_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_service_account_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_service_account_sync.py new file mode 100644 index 000000000..06f83f0eb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_service_account_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetServiceAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetServiceAccount_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_service_account(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetServiceAccountInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_service_account(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetServiceAccount_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_shielded_instance_integrity_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_shielded_instance_integrity_policy_sync.py new file mode 100644 index 000000000..af65f6519 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_shielded_instance_integrity_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetShieldedInstanceIntegrityPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetShieldedInstanceIntegrityPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_shielded_instance_integrity_policy(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetShieldedInstanceIntegrityPolicyInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_shielded_instance_integrity_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetShieldedInstanceIntegrityPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_tags_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_tags_sync.py new file mode 100644 index 000000000..0d8fe2ffc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_set_tags_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetTags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SetTags_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_tags(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SetTagsInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_tags(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SetTags_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_simulate_maintenance_event_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_simulate_maintenance_event_sync.py new file mode 100644 index 000000000..3639327f1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_simulate_maintenance_event_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SimulateMaintenanceEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_SimulateMaintenanceEvent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_simulate_maintenance_event(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SimulateMaintenanceEventInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.simulate_maintenance_event(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_SimulateMaintenanceEvent_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_start_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_start_sync.py new file mode 100644 index 000000000..50ffcabe5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_start_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Start +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Start_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_start(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.StartInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.start(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Start_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_start_with_encryption_key_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_start_with_encryption_key_sync.py new file mode 100644 index 000000000..7f5b65158 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_start_with_encryption_key_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartWithEncryptionKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_StartWithEncryptionKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_start_with_encryption_key(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.StartWithEncryptionKeyInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.start_with_encryption_key(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_StartWithEncryptionKey_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_stop_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_stop_sync.py new file mode 100644 index 000000000..820a73bd3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_stop_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Stop +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Stop_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_stop(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.StopInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.stop(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Stop_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_suspend_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_suspend_sync.py new file mode 100644 index 000000000..f2906302f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_suspend_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Suspend +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Suspend_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_suspend(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.SuspendInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.suspend(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Suspend_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_test_iam_permissions_sync.py new file mode 100644 index 000000000..d47e5f986 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsInstanceRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_access_config_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_access_config_sync.py new file mode 100644 index 000000000..433a06b89 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_access_config_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAccessConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_UpdateAccessConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_access_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateAccessConfigInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_access_config(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_UpdateAccessConfig_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_display_device_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_display_device_sync.py new file mode 100644 index 000000000..eecfdaf69 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_display_device_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDisplayDevice +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_UpdateDisplayDevice_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_display_device(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateDisplayDeviceInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_display_device(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_UpdateDisplayDevice_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_network_interface_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_network_interface_sync.py new file mode 100644 index 000000000..9c2527a9d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_network_interface_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateNetworkInterface +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_UpdateNetworkInterface_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_network_interface(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateNetworkInterfaceInstanceRequest( + instance="instance_value", + network_interface="network_interface_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_network_interface(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_UpdateNetworkInterface_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_shielded_instance_config_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_shielded_instance_config_sync.py new file mode 100644 index 000000000..e593034cd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_shielded_instance_config_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateShieldedInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_UpdateShieldedInstanceConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_shielded_instance_config(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateShieldedInstanceConfigInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_shielded_instance_config(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_UpdateShieldedInstanceConfig_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_sync.py new file mode 100644 index 000000000..d63f1245c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_instances_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Instances_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.InstancesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateInstanceRequest( + instance="instance_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Instances_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_aggregated_list_sync.py new file mode 100644 index 000000000..6a5c35920 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectAttachments_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListInterconnectAttachmentsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InterconnectAttachments_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_delete_sync.py new file mode 100644 index 000000000..ae4751d9a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectAttachments_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInterconnectAttachmentRequest( + interconnect_attachment="interconnect_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InterconnectAttachments_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_get_sync.py new file mode 100644 index 000000000..c96e3de87 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectAttachments_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetInterconnectAttachmentRequest( + interconnect_attachment="interconnect_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InterconnectAttachments_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_insert_sync.py new file mode 100644 index 000000000..74bd5c952 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectAttachments_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInterconnectAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InterconnectAttachments_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_list_sync.py new file mode 100644 index 000000000..d21357db6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectAttachments_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.ListInterconnectAttachmentsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InterconnectAttachments_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_patch_sync.py new file mode 100644 index 000000000..41e760a56 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectAttachments_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchInterconnectAttachmentRequest( + interconnect_attachment="interconnect_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InterconnectAttachments_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_set_labels_sync.py new file mode 100644 index 000000000..d0ef9b249 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_attachments_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectAttachments_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.InterconnectAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInterconnectAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InterconnectAttachments_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_locations_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_locations_get_sync.py new file mode 100644 index 000000000..02da3c48f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_locations_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectLocations_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InterconnectLocationsClient() + + # Initialize request argument(s) + request = compute_v1.GetInterconnectLocationRequest( + interconnect_location="interconnect_location_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InterconnectLocations_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_locations_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_locations_list_sync.py new file mode 100644 index 000000000..781f9c164 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_locations_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectLocations_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InterconnectLocationsClient() + + # Initialize request argument(s) + request = compute_v1.ListInterconnectLocationsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InterconnectLocations_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_remote_locations_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_remote_locations_get_sync.py new file mode 100644 index 000000000..1cfeea8b1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_remote_locations_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectRemoteLocations_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InterconnectRemoteLocationsClient() + + # Initialize request argument(s) + request = compute_v1.GetInterconnectRemoteLocationRequest( + interconnect_remote_location="interconnect_remote_location_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InterconnectRemoteLocations_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_remote_locations_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_remote_locations_list_sync.py new file mode 100644 index 000000000..c107a8403 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnect_remote_locations_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InterconnectRemoteLocations_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InterconnectRemoteLocationsClient() + + # Initialize request argument(s) + request = compute_v1.ListInterconnectRemoteLocationsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InterconnectRemoteLocations_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_delete_sync.py new file mode 100644 index 000000000..fae7a80e9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Interconnects_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Interconnects_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_get_diagnostics_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_get_diagnostics_sync.py new file mode 100644 index 000000000..3cef73dd3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_get_diagnostics_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDiagnostics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Interconnects_GetDiagnostics_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_diagnostics(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.GetDiagnosticsInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.get_diagnostics(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Interconnects_GetDiagnostics_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_get_sync.py new file mode 100644 index 000000000..f35a13b6c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Interconnects_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.GetInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Interconnects_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_insert_sync.py new file mode 100644 index 000000000..5e603cfb9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Interconnects_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInterconnectRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Interconnects_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_list_sync.py new file mode 100644 index 000000000..e808df560 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Interconnects_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.ListInterconnectsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Interconnects_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_patch_sync.py new file mode 100644 index 000000000..a246269a9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Interconnects_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.PatchInterconnectRequest( + interconnect="interconnect_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Interconnects_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_set_labels_sync.py new file mode 100644 index 000000000..84176f390 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_interconnects_set_labels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Interconnects_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.InterconnectsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsInterconnectRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Interconnects_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_license_codes_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_license_codes_get_sync.py new file mode 100644 index 000000000..c66ffa55a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_license_codes_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_LicenseCodes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.LicenseCodesClient() + + # Initialize request argument(s) + request = compute_v1.GetLicenseCodeRequest( + license_code="license_code_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_LicenseCodes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_license_codes_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_license_codes_test_iam_permissions_sync.py new file mode 100644 index 000000000..4812552ef --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_license_codes_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_LicenseCodes_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.LicenseCodesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsLicenseCodeRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_LicenseCodes_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_delete_sync.py new file mode 100644 index 000000000..9299d39cc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Licenses_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteLicenseRequest( + license_="license__value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Licenses_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_get_iam_policy_sync.py new file mode 100644 index 000000000..3bdc6694a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Licenses_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyLicenseRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Licenses_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_get_sync.py new file mode 100644 index 000000000..eb066bb92 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Licenses_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.GetLicenseRequest( + license_="license__value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Licenses_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_insert_sync.py new file mode 100644 index 000000000..74f9270bd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Licenses_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.InsertLicenseRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Licenses_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_list_sync.py new file mode 100644 index 000000000..74dea400f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Licenses_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.ListLicensesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Licenses_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_set_iam_policy_sync.py new file mode 100644 index 000000000..2cc661530 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Licenses_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyLicenseRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Licenses_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_test_iam_permissions_sync.py new file mode 100644 index 000000000..6757ae189 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_licenses_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Licenses_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.LicensesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsLicenseRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Licenses_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_delete_sync.py new file mode 100644 index 000000000..dd0778036 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineImages_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteMachineImageRequest( + machine_image="machine_image_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_MachineImages_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_get_iam_policy_sync.py new file mode 100644 index 000000000..fd2a5196b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineImages_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyMachineImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_MachineImages_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_get_sync.py new file mode 100644 index 000000000..69d16987a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineImages_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.GetMachineImageRequest( + machine_image="machine_image_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_MachineImages_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_insert_sync.py new file mode 100644 index 000000000..5f42f1168 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineImages_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.InsertMachineImageRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_MachineImages_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_list_sync.py new file mode 100644 index 000000000..9f97c269c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineImages_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.ListMachineImagesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_MachineImages_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_set_iam_policy_sync.py new file mode 100644 index 000000000..2a6dca80b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineImages_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyMachineImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_MachineImages_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_test_iam_permissions_sync.py new file mode 100644 index 000000000..5985d64a2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_images_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineImages_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.MachineImagesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsMachineImageRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_MachineImages_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_aggregated_list_sync.py new file mode 100644 index 000000000..62cbefb5e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineTypes_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.MachineTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListMachineTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_MachineTypes_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_get_sync.py new file mode 100644 index 000000000..381289be3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineTypes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.MachineTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetMachineTypeRequest( + machine_type="machine_type_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_MachineTypes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_list_sync.py new file mode 100644 index 000000000..0164b94f8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_machine_types_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_MachineTypes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.MachineTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListMachineTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_MachineTypes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_aggregated_list_sync.py new file mode 100644 index 000000000..c8be487f6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNetworkAttachmentsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NetworkAttachments_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_delete_sync.py new file mode 100644 index 000000000..7c2d5513c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkAttachmentRequest( + network_attachment="network_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkAttachments_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_get_iam_policy_sync.py new file mode 100644 index 000000000..7bc1aed38 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyNetworkAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkAttachments_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_get_sync.py new file mode 100644 index 000000000..8494859a4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkAttachmentRequest( + network_attachment="network_attachment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkAttachments_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_insert_sync.py new file mode 100644 index 000000000..f90fd7b1b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkAttachments_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_list_sync.py new file mode 100644 index 000000000..953565fb3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkAttachmentsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NetworkAttachments_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_set_iam_policy_sync.py new file mode 100644 index 000000000..df6deae84 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyNetworkAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkAttachments_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_test_iam_permissions_sync.py new file mode 100644 index 000000000..bde337803 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_attachments_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkAttachments_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NetworkAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNetworkAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkAttachments_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_aggregated_list_sync.py new file mode 100644 index 000000000..b1d1fa5bb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEdgeSecurityServices_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNetworkEdgeSecurityServicesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NetworkEdgeSecurityServices_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_delete_sync.py new file mode 100644 index 000000000..1de0d26fe --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEdgeSecurityServices_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkEdgeSecurityServiceRequest( + network_edge_security_service="network_edge_security_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEdgeSecurityServices_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_get_sync.py new file mode 100644 index 000000000..39aa22c8a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEdgeSecurityServices_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkEdgeSecurityServiceRequest( + network_edge_security_service="network_edge_security_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEdgeSecurityServices_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_insert_sync.py new file mode 100644 index 000000000..2f6542f04 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEdgeSecurityServices_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkEdgeSecurityServiceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEdgeSecurityServices_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_patch_sync.py new file mode 100644 index 000000000..5fb3bd913 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_edge_security_services_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEdgeSecurityServices_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.NetworkEdgeSecurityServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkEdgeSecurityServiceRequest( + network_edge_security_service="network_edge_security_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEdgeSecurityServices_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py new file mode 100644 index 000000000..19a4fdeff --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNetworkEndpointGroupsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py new file mode 100644 index 000000000..e83dbc3db --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AttachNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_AttachNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_attach_network_endpoints(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AttachNetworkEndpointsNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.attach_network_endpoints(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_AttachNetworkEndpoints_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_delete_sync.py new file mode 100644 index 000000000..69d655658 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py new file mode 100644 index 000000000..275d78f35 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetachNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_DetachNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_detach_network_endpoints(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DetachNetworkEndpointsNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.detach_network_endpoints(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_DetachNetworkEndpoints_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_get_sync.py new file mode 100644 index 000000000..b525205a7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_insert_sync.py new file mode 100644 index 000000000..82df16d2b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkEndpointGroupRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py new file mode 100644 index 000000000..bf85f627b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNetworkEndpoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_ListNetworkEndpoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_network_endpoints(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkEndpointsNetworkEndpointGroupsRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_network_endpoints(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_ListNetworkEndpoints_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_list_sync.py new file mode 100644 index 000000000..f1417d6b9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkEndpointGroupsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py new file mode 100644 index 000000000..1573f2cc7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkEndpointGroups_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNetworkEndpointGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkEndpointGroups_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_add_association_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_add_association_sync.py new file mode 100644 index 000000000..ee75ebca9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_add_association_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_AddAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_association(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddAssociationNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.add_association(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_AddAssociation_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_add_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_add_rule_sync.py new file mode 100644 index 000000000..f94ceab63 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_add_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_AddRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_AddRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_clone_rules_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_clone_rules_sync.py new file mode 100644 index 000000000..35a36905e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_clone_rules_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CloneRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_CloneRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_clone_rules(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.CloneRulesNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.clone_rules(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_CloneRules_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_delete_sync.py new file mode 100644 index 000000000..c63af053b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_association_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_association_sync.py new file mode 100644 index 000000000..5b0478e77 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_association_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_GetAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_association(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetAssociationNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.get_association(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_GetAssociation_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py new file mode 100644 index 000000000..51a1bd68d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyNetworkFirewallPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_rule_sync.py new file mode 100644 index 000000000..faac3a91c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_GetRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_GetRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_sync.py new file mode 100644 index 000000000..ccffec443 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_insert_sync.py new file mode 100644 index 000000000..a83ad95eb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkFirewallPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_list_sync.py new file mode 100644 index 000000000..11d04bf7e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworkFirewallPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_patch_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_patch_rule_sync.py new file mode 100644 index 000000000..dee738acf --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_patch_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PatchRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_PatchRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_PatchRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_patch_sync.py new file mode 100644 index 000000000..25d46c03e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_remove_association_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_remove_association_sync.py new file mode 100644 index 000000000..37485ccbb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_remove_association_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_RemoveAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_association(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveAssociationNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.remove_association(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_RemoveAssociation_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_remove_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_remove_rule_sync.py new file mode 100644 index 000000000..c2fe421ba --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_remove_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_RemoveRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_rule(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_RemoveRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py new file mode 100644 index 000000000..1860817b6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyNetworkFirewallPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py new file mode 100644 index 000000000..b9b247c70 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNetworkFirewallPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_add_peering_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_add_peering_sync.py new file mode 100644 index 000000000..bb882bfc6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_add_peering_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddPeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_AddPeering_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_peering(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.AddPeeringNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.add_peering(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_AddPeering_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_delete_sync.py new file mode 100644 index 000000000..2686043a9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_get_effective_firewalls_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_get_effective_firewalls_sync.py new file mode 100644 index 000000000..14e36c33b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_get_effective_firewalls_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEffectiveFirewalls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_GetEffectiveFirewalls_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_effective_firewalls(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.GetEffectiveFirewallsNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.get_effective_firewalls(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_GetEffectiveFirewalls_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_get_sync.py new file mode 100644 index 000000000..2bbccade5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.GetNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_insert_sync.py new file mode 100644 index 000000000..0c98c8bc3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.InsertNetworkRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_list_peering_routes_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_list_peering_routes_sync.py new file mode 100644 index 000000000..f26983082 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_list_peering_routes_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPeeringRoutes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_ListPeeringRoutes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_peering_routes(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.ListPeeringRoutesNetworksRequest( + network="network_value", + project="project_value", + ) + + # Make the request + page_result = client.list_peering_routes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Networks_ListPeeringRoutes_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_list_sync.py new file mode 100644 index 000000000..6858600fa --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.ListNetworksRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Networks_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_patch_sync.py new file mode 100644 index 000000000..126fd1e6b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.PatchNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_remove_peering_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_remove_peering_sync.py new file mode 100644 index 000000000..03944d256 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_remove_peering_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemovePeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_RemovePeering_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_peering(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.RemovePeeringNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.remove_peering(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_RemovePeering_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_switch_to_custom_mode_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_switch_to_custom_mode_sync.py new file mode 100644 index 000000000..54ae7d6c6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_switch_to_custom_mode_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SwitchToCustomMode +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_SwitchToCustomMode_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_switch_to_custom_mode(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.SwitchToCustomModeNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.switch_to_custom_mode(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_SwitchToCustomMode_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_update_peering_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_update_peering_sync.py new file mode 100644 index 000000000..9257b8345 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_networks_update_peering_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePeering +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Networks_UpdatePeering_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_peering(): + # Create a client + client = compute_v1.NetworksClient() + + # Initialize request argument(s) + request = compute_v1.UpdatePeeringNetworkRequest( + network="network_value", + project="project_value", + ) + + # Make the request + response = client.update_peering(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Networks_UpdatePeering_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_add_nodes_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_add_nodes_sync.py new file mode 100644 index 000000000..1933d75d3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_add_nodes_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddNodes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_AddNodes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_nodes(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AddNodesNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.add_nodes(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_AddNodes_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_aggregated_list_sync.py new file mode 100644 index 000000000..52337f0cc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNodeGroupsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NodeGroups_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_delete_nodes_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_delete_nodes_sync.py new file mode 100644 index 000000000..aa3aa523f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_delete_nodes_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteNodes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_DeleteNodes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete_nodes(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNodesNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete_nodes(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_DeleteNodes_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_delete_sync.py new file mode 100644 index 000000000..9d5288d4f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_get_iam_policy_sync.py new file mode 100644 index 000000000..d0f0d9ad4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyNodeGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_get_sync.py new file mode 100644 index 000000000..1017506a6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_insert_sync.py new file mode 100644 index 000000000..fec34fdd8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_insert_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertNodeGroupRequest( + initial_node_count=1911, + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_list_nodes_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_list_nodes_sync.py new file mode 100644 index 000000000..ffef85343 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_list_nodes_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListNodes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_ListNodes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_nodes(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNodesNodeGroupsRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_nodes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NodeGroups_ListNodes_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_list_sync.py new file mode 100644 index 000000000..a89872d3f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListNodeGroupsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NodeGroups_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_patch_sync.py new file mode 100644 index 000000000..780a538e2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.PatchNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_set_iam_policy_sync.py new file mode 100644 index 000000000..8ea9667d6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyNodeGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_set_node_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_set_node_template_sync.py new file mode 100644 index 000000000..4ecb6dd36 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_set_node_template_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetNodeTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_SetNodeTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_node_template(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetNodeTemplateNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.set_node_template(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_SetNodeTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_simulate_maintenance_event_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_simulate_maintenance_event_sync.py new file mode 100644 index 000000000..2e40b7e11 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_simulate_maintenance_event_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SimulateMaintenanceEvent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_SimulateMaintenanceEvent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_simulate_maintenance_event(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SimulateMaintenanceEventNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.simulate_maintenance_event(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_SimulateMaintenanceEvent_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_test_iam_permissions_sync.py new file mode 100644 index 000000000..ec4973f96 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_groups_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNodeGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_aggregated_list_sync.py new file mode 100644 index 000000000..014dedbdb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTemplates_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNodeTemplatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NodeTemplates_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_delete_sync.py new file mode 100644 index 000000000..666fb920c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTemplates_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteNodeTemplateRequest( + node_template="node_template_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeTemplates_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_get_iam_policy_sync.py new file mode 100644 index 000000000..634eb5069 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTemplates_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyNodeTemplateRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeTemplates_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_get_sync.py new file mode 100644 index 000000000..c9e54f6ab --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTemplates_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetNodeTemplateRequest( + node_template="node_template_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeTemplates_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_insert_sync.py new file mode 100644 index 000000000..d196f2054 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTemplates_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertNodeTemplateRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeTemplates_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_list_sync.py new file mode 100644 index 000000000..9bc6247cf --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTemplates_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.ListNodeTemplatesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NodeTemplates_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_set_iam_policy_sync.py new file mode 100644 index 000000000..0ce658004 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTemplates_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyNodeTemplateRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeTemplates_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_test_iam_permissions_sync.py new file mode 100644 index 000000000..7a984e793 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_templates_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTemplates_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.NodeTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsNodeTemplateRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeTemplates_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_aggregated_list_sync.py new file mode 100644 index 000000000..04ff90279 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTypes_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.NodeTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNodeTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NodeTypes_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_get_sync.py new file mode 100644 index 000000000..ce8cd77a1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTypes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.NodeTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetNodeTypeRequest( + node_type="node_type_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeTypes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_list_sync.py new file mode 100644 index 000000000..f77c21390 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_node_types_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeTypes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.NodeTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListNodeTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NodeTypes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_aggregated_list_sync.py new file mode 100644 index 000000000..955008941 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PacketMirrorings_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListPacketMirroringsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_PacketMirrorings_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_delete_sync.py new file mode 100644 index 000000000..30e361c48 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PacketMirrorings_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.DeletePacketMirroringRequest( + packet_mirroring="packet_mirroring_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PacketMirrorings_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_get_sync.py new file mode 100644 index 000000000..c7c7b8f10 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PacketMirrorings_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.GetPacketMirroringRequest( + packet_mirroring="packet_mirroring_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PacketMirrorings_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_insert_sync.py new file mode 100644 index 000000000..603ad2a97 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PacketMirrorings_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.InsertPacketMirroringRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PacketMirrorings_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_list_sync.py new file mode 100644 index 000000000..bd1c04573 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PacketMirrorings_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.ListPacketMirroringsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_PacketMirrorings_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_patch_sync.py new file mode 100644 index 000000000..bd3780cd5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PacketMirrorings_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.PatchPacketMirroringRequest( + packet_mirroring="packet_mirroring_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PacketMirrorings_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_test_iam_permissions_sync.py new file mode 100644 index 000000000..7f3b1c0e6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_packet_mirrorings_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PacketMirrorings_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.PacketMirroringsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsPacketMirroringRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PacketMirrorings_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_disable_xpn_host_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_disable_xpn_host_sync.py new file mode 100644 index 000000000..04729aa39 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_disable_xpn_host_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableXpnHost +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_DisableXpnHost_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_disable_xpn_host(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.DisableXpnHostProjectRequest( + project="project_value", + ) + + # Make the request + response = client.disable_xpn_host(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_DisableXpnHost_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_disable_xpn_resource_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_disable_xpn_resource_sync.py new file mode 100644 index 000000000..b9b788f34 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_disable_xpn_resource_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableXpnResource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_DisableXpnResource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_disable_xpn_resource(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.DisableXpnResourceProjectRequest( + project="project_value", + ) + + # Make the request + response = client.disable_xpn_resource(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_DisableXpnResource_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_enable_xpn_host_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_enable_xpn_host_sync.py new file mode 100644 index 000000000..6d6de1f30 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_enable_xpn_host_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableXpnHost +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_EnableXpnHost_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_enable_xpn_host(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.EnableXpnHostProjectRequest( + project="project_value", + ) + + # Make the request + response = client.enable_xpn_host(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_EnableXpnHost_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_enable_xpn_resource_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_enable_xpn_resource_sync.py new file mode 100644 index 000000000..dae93663e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_enable_xpn_resource_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableXpnResource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_EnableXpnResource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_enable_xpn_resource(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.EnableXpnResourceProjectRequest( + project="project_value", + ) + + # Make the request + response = client.enable_xpn_resource(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_EnableXpnResource_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_sync.py new file mode 100644 index 000000000..28461ee9d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.GetProjectRequest( + project="project_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_xpn_host_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_xpn_host_sync.py new file mode 100644 index 000000000..2582677f2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_xpn_host_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetXpnHost +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_GetXpnHost_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_xpn_host(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.GetXpnHostProjectRequest( + project="project_value", + ) + + # Make the request + response = client.get_xpn_host(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_GetXpnHost_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_xpn_resources_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_xpn_resources_sync.py new file mode 100644 index 000000000..829d76b51 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_get_xpn_resources_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetXpnResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_GetXpnResources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_xpn_resources(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.GetXpnResourcesProjectsRequest( + project="project_value", + ) + + # Make the request + page_result = client.get_xpn_resources(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Projects_GetXpnResources_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_list_xpn_hosts_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_list_xpn_hosts_sync.py new file mode 100644 index 000000000..f8d4e6177 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_list_xpn_hosts_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListXpnHosts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_ListXpnHosts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_xpn_hosts(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.ListXpnHostsProjectsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_xpn_hosts(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Projects_ListXpnHosts_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_move_disk_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_move_disk_sync.py new file mode 100644 index 000000000..80ab0feaf --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_move_disk_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MoveDisk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_MoveDisk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_move_disk(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.MoveDiskProjectRequest( + project="project_value", + ) + + # Make the request + response = client.move_disk(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_MoveDisk_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_move_instance_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_move_instance_sync.py new file mode 100644 index 000000000..bceb01c92 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_move_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MoveInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_MoveInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_move_instance(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.MoveInstanceProjectRequest( + project="project_value", + ) + + # Make the request + response = client.move_instance(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_MoveInstance_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_common_instance_metadata_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_common_instance_metadata_sync.py new file mode 100644 index 000000000..7d23ad3f2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_common_instance_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetCommonInstanceMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_SetCommonInstanceMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_common_instance_metadata(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.SetCommonInstanceMetadataProjectRequest( + project="project_value", + ) + + # Make the request + response = client.set_common_instance_metadata(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_SetCommonInstanceMetadata_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_default_network_tier_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_default_network_tier_sync.py new file mode 100644 index 000000000..b163a41b3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_default_network_tier_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetDefaultNetworkTier +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_SetDefaultNetworkTier_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_default_network_tier(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.SetDefaultNetworkTierProjectRequest( + project="project_value", + ) + + # Make the request + response = client.set_default_network_tier(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_SetDefaultNetworkTier_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_usage_export_bucket_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_usage_export_bucket_sync.py new file mode 100644 index 000000000..04d3b56ff --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_projects_set_usage_export_bucket_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetUsageExportBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Projects_SetUsageExportBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_usage_export_bucket(): + # Create a client + client = compute_v1.ProjectsClient() + + # Initialize request argument(s) + request = compute_v1.SetUsageExportBucketProjectRequest( + project="project_value", + ) + + # Make the request + response = client.set_usage_export_bucket(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Projects_SetUsageExportBucket_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_delete_sync.py new file mode 100644 index 000000000..31899ee4a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicAdvertisedPrefixes_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.DeletePublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicAdvertisedPrefixes_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_get_sync.py new file mode 100644 index 000000000..2f6766d77 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicAdvertisedPrefixes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.GetPublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicAdvertisedPrefixes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_insert_sync.py new file mode 100644 index 000000000..347f1f554 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicAdvertisedPrefixes_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.InsertPublicAdvertisedPrefixeRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicAdvertisedPrefixes_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_list_sync.py new file mode 100644 index 000000000..3b8abbcf8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicAdvertisedPrefixes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.ListPublicAdvertisedPrefixesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_PublicAdvertisedPrefixes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_patch_sync.py new file mode 100644 index 000000000..93d48a09b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_advertised_prefixes_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicAdvertisedPrefixes_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.PublicAdvertisedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchPublicAdvertisedPrefixeRequest( + project="project_value", + public_advertised_prefix="public_advertised_prefix_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicAdvertisedPrefixes_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py new file mode 100644 index 000000000..652003e04 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicDelegatedPrefixes_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListPublicDelegatedPrefixesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_PublicDelegatedPrefixes_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_delete_sync.py new file mode 100644 index 000000000..33894d738 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicDelegatedPrefixes_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.DeletePublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicDelegatedPrefixes_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_get_sync.py new file mode 100644 index 000000000..d107d52c6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicDelegatedPrefixes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.GetPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicDelegatedPrefixes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_insert_sync.py new file mode 100644 index 000000000..a204839fc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicDelegatedPrefixes_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.InsertPublicDelegatedPrefixeRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicDelegatedPrefixes_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_list_sync.py new file mode 100644 index 000000000..dea7c5ac6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicDelegatedPrefixes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.ListPublicDelegatedPrefixesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_PublicDelegatedPrefixes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_patch_sync.py new file mode 100644 index 000000000..9e900ad6b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_public_delegated_prefixes_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_PublicDelegatedPrefixes_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.PublicDelegatedPrefixesClient() + + # Initialize request argument(s) + request = compute_v1.PatchPublicDelegatedPrefixeRequest( + project="project_value", + public_delegated_prefix="public_delegated_prefix_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_PublicDelegatedPrefixes_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_delete_sync.py new file mode 100644 index 000000000..b61aa4c86 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionAutoscalers_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionAutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionAutoscalerRequest( + autoscaler="autoscaler_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionAutoscalers_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_get_sync.py new file mode 100644 index 000000000..b77b5dd7a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionAutoscalers_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionAutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionAutoscalerRequest( + autoscaler="autoscaler_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionAutoscalers_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_insert_sync.py new file mode 100644 index 000000000..a27cefedd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionAutoscalers_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionAutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionAutoscalerRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionAutoscalers_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_list_sync.py new file mode 100644 index 000000000..6b5ae7874 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionAutoscalers_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionAutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionAutoscalersRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionAutoscalers_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_patch_sync.py new file mode 100644 index 000000000..75ebd41a9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionAutoscalers_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionAutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionAutoscalerRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionAutoscalers_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_update_sync.py new file mode 100644 index 000000000..aea43ce93 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_autoscalers_update_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionAutoscalers_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.RegionAutoscalersClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionAutoscalerRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionAutoscalers_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_delete_sync.py new file mode 100644 index 000000000..074b5b353 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_health_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_health_sync.py new file mode 100644 index 000000000..e7659e6ae --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_health_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHealth +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_GetHealth_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_health(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_GetHealth_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_iam_policy_sync.py new file mode 100644 index 000000000..70e1d8b72 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionBackendServiceRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_sync.py new file mode 100644 index 000000000..5ae896d1f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_insert_sync.py new file mode 100644 index 000000000..49c52c982 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionBackendServiceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_list_sync.py new file mode 100644 index 000000000..1351c7a61 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionBackendServicesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionBackendServices_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_patch_sync.py new file mode 100644 index 000000000..ca94d587b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_set_iam_policy_sync.py new file mode 100644 index 000000000..337a0d492 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionBackendServiceRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_update_sync.py new file mode 100644 index 000000000..d0912bea8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_backend_services_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendServices_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.RegionBackendServicesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionBackendServiceRequest( + backend_service="backend_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionBackendServices_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_aggregated_list_sync.py new file mode 100644 index 000000000..a25d949c7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionCommitments_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListRegionCommitmentsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionCommitments_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_get_sync.py new file mode 100644 index 000000000..e190dfd6b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionCommitments_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionCommitmentRequest( + commitment="commitment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionCommitments_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_insert_sync.py new file mode 100644 index 000000000..553699152 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionCommitments_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionCommitmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionCommitments_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_list_sync.py new file mode 100644 index 000000000..3de324586 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionCommitments_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionCommitmentsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionCommitments_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_update_sync.py new file mode 100644 index 000000000..b6b37112c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_commitments_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionCommitments_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.RegionCommitmentsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionCommitmentRequest( + commitment="commitment_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionCommitments_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disk_types_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disk_types_get_sync.py new file mode 100644 index 000000000..ad1d52bbf --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disk_types_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDiskTypes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionDiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionDiskTypeRequest( + disk_type="disk_type_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDiskTypes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disk_types_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disk_types_list_sync.py new file mode 100644 index 000000000..e3df5bf89 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disk_types_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDiskTypes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionDiskTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionDiskTypesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionDiskTypes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_add_resource_policies_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_add_resource_policies_sync.py new file mode 100644 index 000000000..77ee84108 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_add_resource_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddResourcePolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_AddResourcePolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_resource_policies(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.AddResourcePoliciesRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.add_resource_policies(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_AddResourcePolicies_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_bulk_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_bulk_insert_sync.py new file mode 100644 index 000000000..d6cee727c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_bulk_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkInsert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_BulkInsert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_bulk_insert(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_BulkInsert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_create_snapshot_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_create_snapshot_sync.py new file mode 100644 index 000000000..e940305d1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_create_snapshot_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_CreateSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_create_snapshot(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.CreateSnapshotRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_CreateSnapshot_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_delete_sync.py new file mode 100644 index 000000000..5034cdc69 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_get_iam_policy_sync.py new file mode 100644 index 000000000..6c0e73a6d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_get_sync.py new file mode 100644 index 000000000..5d4d3c3ab --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_insert_sync.py new file mode 100644 index 000000000..18a1aa588 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_list_sync.py new file mode 100644 index 000000000..f03aa89f1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionDisksRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionDisks_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_remove_resource_policies_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_remove_resource_policies_sync.py new file mode 100644 index 000000000..0795052f2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_remove_resource_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveResourcePolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_RemoveResourcePolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_resource_policies(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.RemoveResourcePoliciesRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_resource_policies(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_RemoveResourcePolicies_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_resize_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_resize_sync.py new file mode 100644 index 000000000..7794f24a8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_resize_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Resize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_Resize_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_resize(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.ResizeRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_Resize_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_set_iam_policy_sync.py new file mode 100644 index 000000000..9a6171e7b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_set_labels_sync.py new file mode 100644 index 000000000..17b13e4e5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_start_async_replication_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_start_async_replication_sync.py new file mode 100644 index 000000000..5032999a4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_start_async_replication_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartAsyncReplication +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_StartAsyncReplication_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_start_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StartAsyncReplicationRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.start_async_replication(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_StartAsyncReplication_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_stop_async_replication_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_stop_async_replication_sync.py new file mode 100644 index 000000000..c996d8ec7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_stop_async_replication_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopAsyncReplication +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_StopAsyncReplication_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_stop_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StopAsyncReplicationRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.stop_async_replication(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_StopAsyncReplication_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_stop_group_async_replication_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_stop_group_async_replication_sync.py new file mode 100644 index 000000000..14bafaaaa --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_stop_group_async_replication_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopGroupAsyncReplication +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_StopGroupAsyncReplication_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_stop_group_async_replication(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.StopGroupAsyncReplicationRegionDiskRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.stop_group_async_replication(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_StopGroupAsyncReplication_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_test_iam_permissions_sync.py new file mode 100644 index 000000000..0f0ca7bfe --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionDiskRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_update_sync.py new file mode 100644 index 000000000..4ca5f15a7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_disks_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionDisks_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_delete_sync.py new file mode 100644 index 000000000..6660b0962 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthCheckServices_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionHealthCheckServiceRequest( + health_check_service="health_check_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthCheckServices_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_get_sync.py new file mode 100644 index 000000000..cec9fca42 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthCheckServices_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionHealthCheckServiceRequest( + health_check_service="health_check_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthCheckServices_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_insert_sync.py new file mode 100644 index 000000000..683e106f8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthCheckServices_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionHealthCheckServiceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthCheckServices_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_list_sync.py new file mode 100644 index 000000000..624d96d4b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthCheckServices_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionHealthCheckServicesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionHealthCheckServices_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_patch_sync.py new file mode 100644 index 000000000..14b1e53d3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_check_services_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthCheckServices_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionHealthCheckServicesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionHealthCheckServiceRequest( + health_check_service="health_check_service_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthCheckServices_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_delete_sync.py new file mode 100644 index 000000000..d4031c425 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthChecks_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthChecks_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_get_sync.py new file mode 100644 index 000000000..5b337c874 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthChecks_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthChecks_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_insert_sync.py new file mode 100644 index 000000000..1a4bcb51a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthChecks_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionHealthCheckRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthChecks_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_list_sync.py new file mode 100644 index 000000000..8728f2d81 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthChecks_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionHealthChecksRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionHealthChecks_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_patch_sync.py new file mode 100644 index 000000000..bd3821cff --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthChecks_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthChecks_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_update_sync.py new file mode 100644 index 000000000..f24c697ad --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_health_checks_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthChecks_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.RegionHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionHealthCheckRequest( + health_check="health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionHealthChecks_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py new file mode 100644 index 000000000..fe512b218 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AbandonInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_AbandonInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_abandon_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.AbandonInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.abandon_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_AbandonInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py new file mode 100644 index 000000000..7fbbc3bf8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyUpdatesToInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_ApplyUpdatesToInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_apply_updates_to_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.apply_updates_to_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_ApplyUpdatesToInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_create_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_create_instances_sync.py new file mode 100644 index 000000000..e5cf5fed4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_create_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_CreateInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_create_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.CreateInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.create_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_CreateInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_instances_sync.py new file mode 100644 index 000000000..87a49d140 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_DeleteInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_DeleteInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py new file mode 100644 index 000000000..34d040b81 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeletePerInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_DeletePerInstanceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete_per_instance_configs(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_DeletePerInstanceConfigs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_sync.py new file mode 100644 index 000000000..34dfbb355 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_get_sync.py new file mode 100644 index 000000000..79180c35a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_insert_sync.py new file mode 100644 index 000000000..60c87f475 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstanceGroupManagerRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_errors_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_errors_sync.py new file mode 100644 index 000000000..27498fb41 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_errors_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListErrors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_ListErrors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_errors(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListErrorsRegionInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_errors(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_ListErrors_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py new file mode 100644 index 000000000..5fe598b90 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListManagedInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_ListManagedInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_managed_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListManagedInstancesRegionInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_managed_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_ListManagedInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py new file mode 100644 index 000000000..cb7593791 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListPerInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_ListPerInstanceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListPerInstanceConfigsRegionInstanceGroupManagersRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_per_instance_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_ListPerInstanceConfigs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_sync.py new file mode 100644 index 000000000..67ac486fd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstanceGroupManagersRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py new file mode 100644 index 000000000..f432b6eb7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PatchPerInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_PatchPerInstanceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch_per_instance_configs(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_PatchPerInstanceConfigs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_patch_sync.py new file mode 100644 index 000000000..c0425505f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py new file mode 100644 index 000000000..0ac3715a1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RecreateInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_RecreateInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_recreate_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.RecreateInstancesRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.recreate_instances(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_RecreateInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_resize_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_resize_sync.py new file mode 100644 index 000000000..cb0066615 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_resize_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Resize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_Resize_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_resize(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.ResizeRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + size=443, + ) + + # Make the request + response = client.resize(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_Resize_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py new file mode 100644 index 000000000..9aa4e6f17 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetInstanceTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_SetInstanceTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_instance_template(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetInstanceTemplateRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_instance_template(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_SetInstanceTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py new file mode 100644 index 000000000..5eb5d8c1c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetTargetPools +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_SetTargetPools_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_target_pools(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.SetTargetPoolsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_target_pools(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_SetTargetPools_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py new file mode 100644 index 000000000..b671fe82d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdatePerInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagers_UpdatePerInstanceConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_per_instance_configs(): + # Create a client + client = compute_v1.RegionInstanceGroupManagersClient() + + # Initialize request argument(s) + request = compute_v1.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update_per_instance_configs(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroupManagers_UpdatePerInstanceConfigs_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_get_sync.py new file mode 100644 index 000000000..d988f6001 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroups_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionInstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroups_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_list_instances_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_list_instances_sync.py new file mode 100644 index 000000000..492ebcd29 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_list_instances_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroups_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_instances(): + # Create a client + client = compute_v1.RegionInstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstancesRegionInstanceGroupsRequest( + instance_group="instance_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionInstanceGroups_ListInstances_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_list_sync.py new file mode 100644 index 000000000..7b7313b7c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroups_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionInstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstanceGroupsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionInstanceGroups_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_set_named_ports_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_set_named_ports_sync.py new file mode 100644 index 000000000..690252fdf --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_groups_set_named_ports_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetNamedPorts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroups_SetNamedPorts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_named_ports(): + # Create a client + client = compute_v1.RegionInstanceGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetNamedPortsRegionInstanceGroupRequest( + instance_group="instance_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.set_named_ports(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceGroups_SetNamedPorts_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_delete_sync.py new file mode 100644 index 000000000..cea6b5e22 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceTemplates_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceTemplates_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_get_sync.py new file mode 100644 index 000000000..7bec8da9a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceTemplates_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstanceTemplateRequest( + instance_template="instance_template_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceTemplates_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_insert_sync.py new file mode 100644 index 000000000..8e26ab2a3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceTemplates_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstanceTemplateRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstanceTemplates_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_list_sync.py new file mode 100644 index 000000000..4a8f36d9d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instance_templates_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceTemplates_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionInstanceTemplatesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstanceTemplatesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionInstanceTemplates_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instances_bulk_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instances_bulk_insert_sync.py new file mode 100644 index 000000000..4cd9947ba --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_instances_bulk_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkInsert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstances_BulkInsert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_bulk_insert(): + # Create a client + client = compute_v1.RegionInstancesClient() + + # Initialize request argument(s) + request = compute_v1.BulkInsertRegionInstanceRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.bulk_insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionInstances_BulkInsert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_delete_sync.py new file mode 100644 index 000000000..6c518efca --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkEndpointGroups_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkEndpointGroups_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_get_sync.py new file mode 100644 index 000000000..6beccaa2b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkEndpointGroups_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionNetworkEndpointGroupRequest( + network_endpoint_group="network_endpoint_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkEndpointGroups_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_insert_sync.py new file mode 100644 index 000000000..1f59ce4ce --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkEndpointGroups_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionNetworkEndpointGroupRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkEndpointGroups_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_list_sync.py new file mode 100644 index 000000000..ee04b6970 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_endpoint_groups_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkEndpointGroups_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionNetworkEndpointGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionNetworkEndpointGroupsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionNetworkEndpointGroups_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_add_association_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_add_association_sync.py new file mode 100644 index 000000000..ab9519f04 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_add_association_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_AddAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_association(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddAssociationRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.add_association(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_AddAssociation_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_add_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_add_rule_sync.py new file mode 100644 index 000000000..1a6859f5a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_add_rule_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_AddRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_AddRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py new file mode 100644 index 000000000..ae6b94150 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CloneRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_CloneRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_clone_rules(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.CloneRulesRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.clone_rules(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_CloneRules_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_delete_sync.py new file mode 100644 index 000000000..059ed80f8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_association_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_association_sync.py new file mode 100644 index 000000000..6e77a9b46 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_association_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_GetAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_association(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetAssociationRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_association(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_GetAssociation_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py new file mode 100644 index 000000000..720de18f5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetEffectiveFirewalls +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_GetEffectiveFirewalls_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_effective_firewalls(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest( + network="network_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_effective_firewalls(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_GetEffectiveFirewalls_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py new file mode 100644 index 000000000..1c4bd6d55 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionNetworkFirewallPolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_rule_sync.py new file mode 100644 index 000000000..94b517772 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_rule_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_GetRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_GetRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_sync.py new file mode 100644 index 000000000..e5af2fa5f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_insert_sync.py new file mode 100644 index 000000000..31deceb5a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionNetworkFirewallPolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_list_sync.py new file mode 100644 index 000000000..37c9d6fb9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionNetworkFirewallPoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py new file mode 100644 index 000000000..670488aef --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PatchRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_PatchRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_PatchRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_patch_sync.py new file mode 100644 index 000000000..f51b6802a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_remove_association_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_remove_association_sync.py new file mode 100644 index 000000000..3bfc0ec55 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_remove_association_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_RemoveAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_association(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveAssociationRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_association(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_RemoveAssociation_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py new file mode 100644 index 000000000..0b73d7edd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_RemoveRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_rule(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleRegionNetworkFirewallPolicyRequest( + firewall_policy="firewall_policy_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_RemoveRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py new file mode 100644 index 000000000..8d9577771 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionNetworkFirewallPolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py new file mode 100644 index 000000000..10dcffb4d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNetworkFirewallPolicies_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionNetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionNetworkFirewallPolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNetworkFirewallPolicies_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_delete_sync.py new file mode 100644 index 000000000..7a15c9573 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNotificationEndpoints_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionNotificationEndpointsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionNotificationEndpointRequest( + notification_endpoint="notification_endpoint_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNotificationEndpoints_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_get_sync.py new file mode 100644 index 000000000..e6691465e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNotificationEndpoints_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionNotificationEndpointsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionNotificationEndpointRequest( + notification_endpoint="notification_endpoint_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNotificationEndpoints_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_insert_sync.py new file mode 100644 index 000000000..c9da7809a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNotificationEndpoints_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionNotificationEndpointsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionNotificationEndpointRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionNotificationEndpoints_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_list_sync.py new file mode 100644 index 000000000..e155ee928 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_notification_endpoints_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionNotificationEndpoints_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionNotificationEndpointsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionNotificationEndpointsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionNotificationEndpoints_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_delete_sync.py new file mode 100644 index 000000000..d0b4dd515 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionOperations_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionOperationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionOperationRequest( + operation="operation_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionOperations_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_get_sync.py new file mode 100644 index 000000000..ab601bd64 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionOperations_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionOperationsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionOperationRequest( + operation="operation_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionOperations_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_list_sync.py new file mode 100644 index 000000000..3aaa7bf0f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionOperations_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionOperationsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionOperationsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionOperations_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_wait_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_wait_sync.py new file mode 100644 index 000000000..2b7761015 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_operations_wait_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Wait +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionOperations_Wait_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_wait(): + # Create a client + client = compute_v1.RegionOperationsClient() + + # Initialize request argument(s) + request = compute_v1.WaitRegionOperationRequest( + operation="operation_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.wait(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionOperations_Wait_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_delete_sync.py new file mode 100644 index 000000000..03d823318 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_get_sync.py new file mode 100644 index 000000000..e93753b04 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_insert_sync.py new file mode 100644 index 000000000..8ba17e748 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_list_sync.py new file mode 100644 index 000000000..802a788de --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionSecurityPoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_patch_sync.py new file mode 100644 index 000000000..c8de2f710 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_security_policies_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSecurityPolicies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionSecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSecurityPolicyRequest( + project="project_value", + region="region_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSecurityPolicies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_delete_sync.py new file mode 100644 index 000000000..64d1b5dac --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslCertificates_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSslCertificateRequest( + project="project_value", + region="region_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSslCertificates_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_get_sync.py new file mode 100644 index 000000000..937c8f086 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslCertificates_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSslCertificateRequest( + project="project_value", + region="region_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSslCertificates_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_insert_sync.py new file mode 100644 index 000000000..9ef0b92b2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslCertificates_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSslCertificateRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSslCertificates_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_list_sync.py new file mode 100644 index 000000000..3c7442330 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_certificates_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslCertificates_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionSslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionSslCertificatesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionSslCertificates_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_delete_sync.py new file mode 100644 index 000000000..63fe93788 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslPolicies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionSslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSslPolicyRequest( + project="project_value", + region="region_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSslPolicies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_get_sync.py new file mode 100644 index 000000000..d9adf749c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslPolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionSslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSslPolicyRequest( + project="project_value", + region="region_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSslPolicies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_insert_sync.py new file mode 100644 index 000000000..b49c65e67 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslPolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionSslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSslPolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSslPolicies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_list_available_features_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_list_available_features_sync.py new file mode 100644 index 000000000..3bfc3e1c1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_list_available_features_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAvailableFeatures +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslPolicies_ListAvailableFeatures_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_available_features(): + # Create a client + client = compute_v1.RegionSslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListAvailableFeaturesRegionSslPoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.list_available_features(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSslPolicies_ListAvailableFeatures_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_list_sync.py new file mode 100644 index 000000000..2d2e9ccab --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslPolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionSslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionSslPoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionSslPolicies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_patch_sync.py new file mode 100644 index 000000000..1bfbec4ba --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_ssl_policies_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSslPolicies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionSslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSslPolicyRequest( + project="project_value", + region="region_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionSslPolicies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_delete_sync.py new file mode 100644 index 000000000..fa2de3d08 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpProxies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpProxies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_get_sync.py new file mode 100644 index 000000000..3e02ff0ed --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpProxies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpProxies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_insert_sync.py new file mode 100644 index 000000000..64ce3cfa9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpProxies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpProxies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_list_sync.py new file mode 100644 index 000000000..5aaa0e730 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpProxies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionTargetHttpProxiesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionTargetHttpProxies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_set_url_map_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_set_url_map_sync.py new file mode 100644 index 000000000..a2389bbee --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_http_proxies_set_url_map_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetUrlMap +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpProxies_SetUrlMap_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_url_map(): + # Create a client + client = compute_v1.RegionTargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapRegionTargetHttpProxyRequest( + project="project_value", + region="region_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpProxies_SetUrlMap_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_delete_sync.py new file mode 100644 index 000000000..8bb68667c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpsProxies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpsProxies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_get_sync.py new file mode 100644 index 000000000..9c6088fe9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpsProxies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpsProxies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_insert_sync.py new file mode 100644 index 000000000..a74fe27f2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpsProxies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpsProxies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_list_sync.py new file mode 100644 index 000000000..073a13b66 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpsProxies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionTargetHttpsProxiesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionTargetHttpsProxies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_patch_sync.py new file mode 100644 index 000000000..0c18c74d6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpsProxies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpsProxies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_set_ssl_certificates_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_set_ssl_certificates_sync.py new file mode 100644 index 000000000..50ad2682d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_set_ssl_certificates_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSslCertificates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpsProxies_SetSslCertificates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_ssl_certificates(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslCertificatesRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_ssl_certificates(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpsProxies_SetSslCertificates_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_set_url_map_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_set_url_map_sync.py new file mode 100644 index 000000000..989e80f31 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_https_proxies_set_url_map_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetUrlMap +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetHttpsProxies_SetUrlMap_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_url_map(): + # Create a client + client = compute_v1.RegionTargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapRegionTargetHttpsProxyRequest( + project="project_value", + region="region_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetHttpsProxies_SetUrlMap_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_delete_sync.py new file mode 100644 index 000000000..e5b1ffefd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetTcpProxies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionTargetTcpProxyRequest( + project="project_value", + region="region_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetTcpProxies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_get_sync.py new file mode 100644 index 000000000..08a2c68b1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetTcpProxies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionTargetTcpProxyRequest( + project="project_value", + region="region_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetTcpProxies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_insert_sync.py new file mode 100644 index 000000000..aacc53a18 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetTcpProxies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionTargetTcpProxyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionTargetTcpProxies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_list_sync.py new file mode 100644 index 000000000..438d9f588 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_target_tcp_proxies_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionTargetTcpProxies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionTargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionTargetTcpProxiesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionTargetTcpProxies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_delete_sync.py new file mode 100644 index 000000000..b71b63fad --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionUrlMaps_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionUrlMaps_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_get_sync.py new file mode 100644 index 000000000..939fa61ba --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionUrlMaps_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionUrlMaps_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_insert_sync.py new file mode 100644 index 000000000..8b0cd83fd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionUrlMaps_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionUrlMapRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionUrlMaps_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_list_sync.py new file mode 100644 index 000000000..68d43c09a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionUrlMaps_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionUrlMapsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_RegionUrlMaps_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_patch_sync.py new file mode 100644 index 000000000..6fa0a11df --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionUrlMaps_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionUrlMaps_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_update_sync.py new file mode 100644 index 000000000..297006c86 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionUrlMaps_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.RegionUrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRegionUrlMapRequest( + project="project_value", + region="region_value", + url_map="url_map_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_RegionUrlMaps_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_validate_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_validate_sync.py new file mode 100644 index 000000000..b0a4572aa --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_region_url_maps_validate_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Validate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionUrlMaps_Validate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_validate():
    """Validate a regional URL map and print the validation result."""
    # Instantiate the regional URL maps client.
    client = compute_v1.RegionUrlMapsClient()

    # Build the request inline and issue the call.
    response = client.validate(
        request=compute_v1.ValidateRegionUrlMapRequest(
            project="project_value",
            region="region_value",
            url_map="url_map_value",
        )
    )

    # Display the result.
    print(response)
def sample_get():
    """Fetch a single region resource and print it."""
    # Instantiate the regions client.
    client = compute_v1.RegionsClient()

    # Build the request inline and issue the call.
    response = client.get(
        request=compute_v1.GetRegionRequest(
            project="project_value",
            region="region_value",
        )
    )

    # Display the result.
    print(response)
def sample_list():
    """List the regions of a project, printing each entry."""
    # Instantiate the regions client.
    client = compute_v1.RegionsClient()

    # Issue the paginated list call.
    pages = client.list(
        request=compute_v1.ListRegionsRequest(project="project_value")
    )

    # The pager yields individual region resources across pages.
    for region in pages:
        print(region)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Reservations_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_aggregated_list():
    """List reservations across all zones of a project, printing each entry."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Issue the paginated aggregated-list call.
    pages = client.aggregated_list(
        request=compute_v1.AggregatedListReservationsRequest(
            project="project_value"
        )
    )

    # The pager yields (scope, reservations-list) entries across pages.
    for entry in pages:
        print(entry)
def sample_delete():
    """Delete a reservation and print the resulting operation."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Build the request inline and issue the call.
    response = client.delete(
        request=compute_v1.DeleteReservationRequest(
            project="project_value",
            reservation="reservation_value",
            zone="zone_value",
        )
    )

    # Display the result.
    print(response)
def sample_get_iam_policy():
    """Fetch the IAM policy of a reservation and print it."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Build the request inline and issue the call.
    response = client.get_iam_policy(
        request=compute_v1.GetIamPolicyReservationRequest(
            project="project_value",
            resource="resource_value",
            zone="zone_value",
        )
    )

    # Display the result.
    print(response)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Reservations_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_get():
    """Fetch a single reservation resource and print it."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Build the request inline and issue the call.
    response = client.get(
        request=compute_v1.GetReservationRequest(
            project="project_value",
            reservation="reservation_value",
            zone="zone_value",
        )
    )

    # Display the result.
    print(response)
def sample_insert():
    """Create a reservation and print the resulting operation."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Build the request inline and issue the call.
    response = client.insert(
        request=compute_v1.InsertReservationRequest(
            project="project_value",
            zone="zone_value",
        )
    )

    # Display the result.
    print(response)
def sample_list():
    """List the reservations in a zone, printing each entry."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Issue the paginated list call.
    pages = client.list(
        request=compute_v1.ListReservationsRequest(
            project="project_value",
            zone="zone_value",
        )
    )

    # The pager yields individual reservation resources across pages.
    for reservation in pages:
        print(reservation)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Resize +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Reservations_Resize_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_resize():
    """Resize a reservation and print the resulting operation."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Build the request inline and issue the call.
    response = client.resize(
        request=compute_v1.ResizeReservationRequest(
            project="project_value",
            reservation="reservation_value",
            zone="zone_value",
        )
    )

    # Display the result.
    print(response)
def sample_set_iam_policy():
    """Set the IAM policy on a reservation and print the applied policy."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Build the request inline and issue the call.
    response = client.set_iam_policy(
        request=compute_v1.SetIamPolicyReservationRequest(
            project="project_value",
            resource="resource_value",
            zone="zone_value",
        )
    )

    # Display the result.
    print(response)
def sample_test_iam_permissions():
    """Check caller permissions on a reservation and print the result."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Build the request inline and issue the call.
    response = client.test_iam_permissions(
        request=compute_v1.TestIamPermissionsReservationRequest(
            project="project_value",
            resource="resource_value",
            zone="zone_value",
        )
    )

    # Display the result.
    print(response)
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Reservations_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_update():
    """Update a reservation and print the resulting operation."""
    # Instantiate the reservations client.
    client = compute_v1.ReservationsClient()

    # Build the request inline and issue the call.
    response = client.update(
        request=compute_v1.UpdateReservationRequest(
            project="project_value",
            reservation="reservation_value",
            zone="zone_value",
        )
    )

    # Display the result.
    print(response)
def sample_aggregated_list():
    """List resource policies across all regions of a project, printing each entry."""
    # Instantiate the resource policies client.
    client = compute_v1.ResourcePoliciesClient()

    # Issue the paginated aggregated-list call.
    pages = client.aggregated_list(
        request=compute_v1.AggregatedListResourcePoliciesRequest(
            project="project_value"
        )
    )

    # The pager yields (scope, policies-list) entries across pages.
    for entry in pages:
        print(entry)
def sample_delete():
    """Delete a resource policy and print the resulting operation."""
    # Instantiate the resource policies client.
    client = compute_v1.ResourcePoliciesClient()

    # Build the request inline and issue the call.
    response = client.delete(
        request=compute_v1.DeleteResourcePolicyRequest(
            project="project_value",
            region="region_value",
            resource_policy="resource_policy_value",
        )
    )

    # Display the result.
    print(response)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ResourcePolicies_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyResourcePolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ResourcePolicies_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_get_sync.py new file mode 100644 index 000000000..e0d4410b9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ResourcePolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetResourcePolicyRequest( + project="project_value", + region="region_value", + resource_policy="resource_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ResourcePolicies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_insert_sync.py new file mode 100644 index 000000000..d451cc8fa --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ResourcePolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertResourcePolicyRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ResourcePolicies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_list_sync.py new file mode 100644 index 000000000..a5ebaf63c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ResourcePolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListResourcePoliciesRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_ResourcePolicies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_patch_sync.py new file mode 100644 index 000000000..c889d2989 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ResourcePolicies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchResourcePolicyRequest( + project="project_value", + region="region_value", + resource_policy="resource_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ResourcePolicies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_set_iam_policy_sync.py new file mode 100644 index 000000000..9916edb83 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ResourcePolicies_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyResourcePolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ResourcePolicies_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_test_iam_permissions_sync.py new file mode 100644 index 000000000..8a8df7efb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_resource_policies_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ResourcePolicies_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ResourcePoliciesClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsResourcePolicyRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ResourcePolicies_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_aggregated_list_sync.py new file mode 100644 index 000000000..74a3bb561 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListRoutersRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Routers_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_delete_sync.py new file mode 100644 index 000000000..7ce8a0f5c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routers_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_nat_mapping_info_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_nat_mapping_info_sync.py new file mode 100644 index 000000000..b20c81aa5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_nat_mapping_info_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetNatMappingInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_GetNatMappingInfo_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_nat_mapping_info(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.GetNatMappingInfoRoutersRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + page_result = client.get_nat_mapping_info(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Routers_GetNatMappingInfo_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_router_status_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_router_status_sync.py new file mode 100644 index 000000000..3f214d622 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_router_status_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRouterStatus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_GetRouterStatus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_router_status(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.GetRouterStatusRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.get_router_status(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routers_GetRouterStatus_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_sync.py new file mode 100644 index 000000000..a7884e16a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.GetRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routers_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_insert_sync.py new file mode 100644 index 000000000..b98358ab2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.InsertRouterRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routers_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_list_sync.py new file mode 100644 index 000000000..ef3385c38 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.ListRoutersRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Routers_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_patch_sync.py new file mode 100644 index 000000000..6220293c8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.PatchRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routers_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_preview_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_preview_sync.py new file mode 100644 index 000000000..339543387 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_preview_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Preview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_Preview_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_preview(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.PreviewRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.preview(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routers_Preview_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_update_sync.py new file mode 100644 index 000000000..3d80aba93 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routers_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routers_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.RoutersClient() + + # Initialize request argument(s) + request = compute_v1.UpdateRouterRequest( + project="project_value", + region="region_value", + router="router_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routers_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_delete_sync.py new file mode 100644 index 000000000..a04ce6401 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routes_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRouteRequest( + project="project_value", + route="route_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routes_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_get_sync.py new file mode 100644 index 000000000..14520bb6d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.GetRouteRequest( + project="project_value", + route="route_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routes_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_insert_sync.py new file mode 100644 index 000000000..a0a6c8b24 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routes_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.InsertRouteRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Routes_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_list_sync.py new file mode 100644 index 000000000..de2cf2deb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_routes_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Routes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RoutesClient() + + # Initialize request argument(s) + request = compute_v1.ListRoutesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Routes_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_add_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_add_rule_sync.py new file mode 100644 index 000000000..f5ba73054 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_add_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_AddRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AddRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.add_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_AddRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_aggregated_list_sync.py new file mode 100644 index 000000000..3fd212ef1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListSecurityPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_SecurityPolicies_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_delete_sync.py new file mode 100644 index 000000000..73f427250 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_get_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_get_rule_sync.py new file mode 100644 index 000000000..5ee6319c8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_get_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_GetRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.get_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_GetRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_get_sync.py new file mode 100644 index 000000000..9be177ae8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_insert_sync.py new file mode 100644 index 000000000..5887b19db --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSecurityPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_list_preconfigured_expression_sets_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_list_preconfigured_expression_sets_sync.py new file mode 100644 index 000000000..1aedf78c5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_list_preconfigured_expression_sets_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPreconfiguredExpressionSets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_ListPreconfiguredExpressionSets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_preconfigured_expression_sets(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListPreconfiguredExpressionSetsSecurityPoliciesRequest( + project="project_value", + ) + + # Make the request + response = client.list_preconfigured_expression_sets(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_ListPreconfiguredExpressionSets_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_list_sync.py new file mode 100644 index 000000000..a12e7e23d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListSecurityPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_SecurityPolicies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_patch_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_patch_rule_sync.py new file mode 100644 index 000000000..cd6202f74 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_patch_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PatchRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_PatchRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_PatchRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_patch_sync.py new file mode 100644 index 000000000..dbc773a7f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_remove_rule_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_remove_rule_sync.py new file mode 100644 index 000000000..d7df1317b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_remove_rule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveRule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_RemoveRule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_rule(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.RemoveRuleSecurityPolicyRequest( + project="project_value", + security_policy="security_policy_value", + ) + + # Make the request + response = client.remove_rule(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_RemoveRule_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_set_labels_sync.py new file mode 100644 index 000000000..940ef459a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_security_policies_set_labels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SecurityPolicies_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.SecurityPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsSecurityPolicyRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SecurityPolicies_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_aggregated_list_sync.py new file mode 100644 index 000000000..60a7beb70 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListServiceAttachmentsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_ServiceAttachments_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_delete_sync.py new file mode 100644 index 000000000..2841eb72b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteServiceAttachmentRequest( + project="project_value", + region="region_value", + service_attachment="service_attachment_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ServiceAttachments_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_get_iam_policy_sync.py new file mode 100644 index 000000000..ffe0e36d8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyServiceAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ServiceAttachments_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_get_sync.py new file mode 100644 index 000000000..735785e31 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.GetServiceAttachmentRequest( + project="project_value", + region="region_value", + service_attachment="service_attachment_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ServiceAttachments_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_insert_sync.py new file mode 100644 index 000000000..782946a22 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.InsertServiceAttachmentRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ServiceAttachments_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_list_sync.py new file mode 100644 index 000000000..55319ba27 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.ListServiceAttachmentsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_ServiceAttachments_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_patch_sync.py new file mode 100644 index 000000000..bcc9049e4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.PatchServiceAttachmentRequest( + project="project_value", + region="region_value", + service_attachment="service_attachment_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ServiceAttachments_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_set_iam_policy_sync.py new file mode 100644 index 000000000..be99856f6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyServiceAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ServiceAttachments_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_test_iam_permissions_sync.py new file mode 100644 index 000000000..854284247 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_service_attachments_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ServiceAttachments_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.ServiceAttachmentsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsServiceAttachmentRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ServiceAttachments_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_delete_sync.py new file mode 100644 index 000000000..515ed628c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSnapshotRequest( + project="project_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Snapshots_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_get_iam_policy_sync.py new file mode 100644 index 000000000..665a770d6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicySnapshotRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Snapshots_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_get_sync.py new file mode 100644 index 000000000..14cc56c5a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.GetSnapshotRequest( + project="project_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Snapshots_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_insert_sync.py new file mode 100644 index 000000000..e4044c0b0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.InsertSnapshotRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Snapshots_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_list_sync.py new file mode 100644 index 000000000..569882d83 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Snapshots_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_set_iam_policy_sync.py new file mode 100644 index 000000000..553e21f27 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicySnapshotRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Snapshots_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_set_labels_sync.py new file mode 100644 index 000000000..0b95507b1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_set_labels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsSnapshotRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Snapshots_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_test_iam_permissions_sync.py new file mode 100644 index 000000000..c15ec4556 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_snapshots_test_iam_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsSnapshotRequest( + project="project_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Snapshots_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_aggregated_list_sync.py new file mode 100644 index 000000000..05cde259a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslCertificates_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListSslCertificatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_SslCertificates_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_delete_sync.py new file mode 100644 index 000000000..b6ad5b5b7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslCertificates_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSslCertificateRequest( + project="project_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SslCertificates_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_get_sync.py new file mode 100644 index 000000000..954bb9e47 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslCertificates_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.GetSslCertificateRequest( + project="project_value", + ssl_certificate="ssl_certificate_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SslCertificates_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_insert_sync.py new file mode 100644 index 000000000..5ae2f583f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslCertificates_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSslCertificateRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SslCertificates_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_list_sync.py new file mode 100644 index 000000000..d2cf9d676 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_certificates_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslCertificates_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.SslCertificatesClient() + + # Initialize request argument(s) + request = compute_v1.ListSslCertificatesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_SslCertificates_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_aggregated_list_sync.py new file mode 100644 index 000000000..66a388be9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslPolicies_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListSslPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_SslPolicies_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_delete_sync.py new file mode 100644 index 000000000..4dae0beb6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslPolicies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSslPolicyRequest( + project="project_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SslPolicies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_get_sync.py new file mode 100644 index 000000000..30155b8fb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslPolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetSslPolicyRequest( + project="project_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SslPolicies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_insert_sync.py new file mode 100644 index 000000000..779a12ce9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslPolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertSslPolicyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SslPolicies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_list_available_features_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_list_available_features_sync.py new file mode 100644 index 000000000..0203ee57c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_list_available_features_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAvailableFeatures +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslPolicies_ListAvailableFeatures_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_available_features(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListAvailableFeaturesSslPoliciesRequest( + project="project_value", + ) + + # Make the request + response = client.list_available_features(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SslPolicies_ListAvailableFeatures_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_list_sync.py new file mode 100644 index 000000000..547fbc8a4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslPolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListSslPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_SslPolicies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_patch_sync.py new file mode 100644 index 000000000..7405db457 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_ssl_policies_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_SslPolicies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.SslPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.PatchSslPolicyRequest( + project="project_value", + ssl_policy="ssl_policy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_SslPolicies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_aggregated_list_sync.py new file mode 100644 index 000000000..ff1e7318f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListSubnetworksRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Subnetworks_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_delete_sync.py new file mode 100644 index 000000000..ddb3d9169 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.DeleteSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_expand_ip_cidr_range_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_expand_ip_cidr_range_sync.py new file mode 100644 index 000000000..8cb5454cc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_expand_ip_cidr_range_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExpandIpCidrRange +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_ExpandIpCidrRange_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_expand_ip_cidr_range(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.ExpandIpCidrRangeSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.expand_ip_cidr_range(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_ExpandIpCidrRange_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_get_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_get_iam_policy_sync.py new file mode 100644 index 000000000..06e33ecd7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicySubnetworkRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_GetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_get_sync.py new file mode 100644 index 000000000..11ffbd79b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.GetSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_insert_sync.py new file mode 100644 index 000000000..b605c81ea --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.InsertSubnetworkRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_list_sync.py new file mode 100644 index 000000000..abe1fcd75 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.ListSubnetworksRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Subnetworks_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_list_usable_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_list_usable_sync.py new file mode 100644 index 000000000..9da332a22 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_list_usable_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_ListUsable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_usable(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.ListUsableSubnetworksRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_usable(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Subnetworks_ListUsable_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_patch_sync.py new file mode 100644 index 000000000..453054ca4 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.PatchSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_set_iam_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_set_iam_policy_sync.py new file mode 100644 index 000000000..ab2ba477d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicySubnetworkRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_SetIamPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_set_private_ip_google_access_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_set_private_ip_google_access_sync.py new file mode 100644 index 000000000..b26ee8f4c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_set_private_ip_google_access_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPrivateIpGoogleAccess +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_SetPrivateIpGoogleAccess_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_private_ip_google_access(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.SetPrivateIpGoogleAccessSubnetworkRequest( + project="project_value", + region="region_value", + subnetwork="subnetwork_value", + ) + + # Make the request + response = client.set_private_ip_google_access(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_SetPrivateIpGoogleAccess_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_test_iam_permissions_sync.py new file mode 100644 index 000000000..4d81e023a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_subnetworks_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Subnetworks_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.SubnetworksClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsSubnetworkRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Subnetworks_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_delete_sync.py new file mode 100644 index 000000000..055ac9837 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetGrpcProxies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetGrpcProxyRequest( + project="project_value", + target_grpc_proxy="target_grpc_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetGrpcProxies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_get_sync.py new file mode 100644 index 000000000..28eff8422 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetGrpcProxies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetGrpcProxyRequest( + project="project_value", + target_grpc_proxy="target_grpc_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetGrpcProxies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_insert_sync.py new file mode 100644 index 000000000..078ba733b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetGrpcProxies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetGrpcProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetGrpcProxies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_list_sync.py new file mode 100644 index 000000000..89d5716b1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetGrpcProxies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetGrpcProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetGrpcProxies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_patch_sync.py new file mode 100644 index 000000000..bf150bce6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_grpc_proxies_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetGrpcProxies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.TargetGrpcProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetGrpcProxyRequest( + project="project_value", + target_grpc_proxy="target_grpc_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetGrpcProxies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_aggregated_list_sync.py new file mode 100644 index 000000000..355cd6c3c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpProxies_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetHttpProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetHttpProxies_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_delete_sync.py new file mode 100644 index 000000000..0ae944a95 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpProxies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetHttpProxyRequest( + project="project_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpProxies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_get_sync.py new file mode 100644 index 000000000..e8ecee5e2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpProxies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetHttpProxyRequest( + project="project_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpProxies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_insert_sync.py new file mode 100644 index 000000000..a946cf916 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpProxies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetHttpProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpProxies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_list_sync.py new file mode 100644 index 000000000..d4e79bff8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpProxies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetHttpProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetHttpProxies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_patch_sync.py new file mode 100644 index 000000000..22a30d699 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpProxies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetHttpProxyRequest( + project="project_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpProxies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_set_url_map_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_set_url_map_sync.py new file mode 100644 index 000000000..94034ee76 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_http_proxies_set_url_map_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetUrlMap +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpProxies_SetUrlMap_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_url_map(): + # Create a client + client = compute_v1.TargetHttpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapTargetHttpProxyRequest( + project="project_value", + target_http_proxy="target_http_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpProxies_SetUrlMap_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_aggregated_list_sync.py new file mode 100644 index 000000000..6dbe49534 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetHttpsProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_delete_sync.py new file mode 100644 index 000000000..ef71df386 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_get_sync.py new file mode 100644 index 000000000..228d64331 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_insert_sync.py new file mode 100644 index 000000000..833fcf4b0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetHttpsProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_list_sync.py new file mode 100644 index 000000000..eb11ef24e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetHttpsProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_patch_sync.py new file mode 100644 index 000000000..8cf2bbd02 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.PatchTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_certificate_map_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_certificate_map_sync.py new file mode 100644 index 000000000..d33bcbd05 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_certificate_map_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetCertificateMap +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_SetCertificateMap_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_certificate_map(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetCertificateMapTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_certificate_map(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_SetCertificateMap_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_quic_override_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_quic_override_sync.py new file mode 100644 index 000000000..c5bddb59b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_quic_override_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetQuicOverride +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_SetQuicOverride_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_quic_override(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetQuicOverrideTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_quic_override(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_SetQuicOverride_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_ssl_certificates_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_ssl_certificates_sync.py new file mode 100644 index 000000000..cbe76996f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_ssl_certificates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSslCertificates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_SetSslCertificates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_ssl_certificates(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslCertificatesTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_ssl_certificates(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_SetSslCertificates_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_ssl_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_ssl_policy_sync.py new file mode 100644 index 000000000..ee2dcfbc5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_ssl_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSslPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_SetSslPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_ssl_policy(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslPolicyTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_ssl_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_SetSslPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_url_map_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_url_map_sync.py new file mode 100644 index 000000000..373e65eb1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_https_proxies_set_url_map_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetUrlMap +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetHttpsProxies_SetUrlMap_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_url_map(): + # Create a client + client = compute_v1.TargetHttpsProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetUrlMapTargetHttpsProxyRequest( + project="project_value", + target_https_proxy="target_https_proxy_value", + ) + + # Make the request + response = client.set_url_map(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetHttpsProxies_SetUrlMap_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_aggregated_list_sync.py new file mode 100644 index 000000000..3ad480c4c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetInstances_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetInstancesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetInstances_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_delete_sync.py new file mode 100644 index 000000000..60d9f52d1 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetInstances_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetInstanceRequest( + project="project_value", + target_instance="target_instance_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetInstances_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_get_sync.py new file mode 100644 index 000000000..91a2ebd76 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetInstances_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetInstanceRequest( + project="project_value", + target_instance="target_instance_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetInstances_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_insert_sync.py new file mode 100644 index 000000000..1dd98234c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetInstances_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetInstanceRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetInstances_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_list_sync.py new file mode 100644 index 000000000..cf22a7bcb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_instances_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetInstances_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.TargetInstancesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetInstancesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetInstances_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_add_health_check_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_add_health_check_sync.py new file mode 100644 index 000000000..bf1cc5bc3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_add_health_check_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddHealthCheck +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_AddHealthCheck_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_health_check(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.AddHealthCheckTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.add_health_check(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_AddHealthCheck_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_add_instance_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_add_instance_sync.py new file mode 100644 index 000000000..6434f1f38 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_add_instance_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_AddInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_add_instance(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.AddInstanceTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.add_instance(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_AddInstance_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_aggregated_list_sync.py new file mode 100644 index 000000000..1548a2d64 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetPoolsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetPools_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_delete_sync.py new file mode 100644 index 000000000..0e7543932 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_get_health_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_get_health_sync.py new file mode 100644 index 000000000..964df036a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_get_health_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHealth +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_GetHealth_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_health(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_GetHealth_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_get_sync.py new file mode 100644 index 000000000..5179d56d8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_insert_sync.py new file mode 100644 index 000000000..bc5f4e613 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetPoolRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_list_sync.py new file mode 100644 index 000000000..20c944922 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetPoolsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetPools_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_remove_health_check_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_remove_health_check_sync.py new file mode 100644 index 000000000..44fda5b3b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_remove_health_check_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveHealthCheck +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_RemoveHealthCheck_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_health_check(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveHealthCheckTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.remove_health_check(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_RemoveHealthCheck_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_remove_instance_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_remove_instance_sync.py new file mode 100644 index 000000000..f3660a8b7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_remove_instance_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_RemoveInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_remove_instance(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.RemoveInstanceTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.remove_instance(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_RemoveInstance_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_set_backup_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_set_backup_sync.py new file mode 100644 index 000000000..09f3e733a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_pools_set_backup_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetPools_SetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_backup(): + # Create a client + client = compute_v1.TargetPoolsClient() + + # Initialize request argument(s) + request = compute_v1.SetBackupTargetPoolRequest( + project="project_value", + region="region_value", + target_pool="target_pool_value", + ) + + # Make the request + response = client.set_backup(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetPools_SetBackup_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_delete_sync.py new file mode 100644 index 000000000..08f065d90 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetSslProxies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_get_sync.py new file mode 100644 index 000000000..e64cabc1d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetSslProxies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_insert_sync.py new file mode 100644 index 000000000..fec15f44e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetSslProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetSslProxies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_list_sync.py new file mode 100644 index 000000000..708094f7e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetSslProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetSslProxies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_backend_service_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_backend_service_sync.py new file mode 100644 index 000000000..ebdae8b98 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_backend_service_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetBackendService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_SetBackendService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_backend_service(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetBackendServiceTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_backend_service(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetSslProxies_SetBackendService_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_certificate_map_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_certificate_map_sync.py new file mode 100644 index 000000000..07cd4cffa --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_certificate_map_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetCertificateMap +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_SetCertificateMap_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_certificate_map(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetCertificateMapTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_certificate_map(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetSslProxies_SetCertificateMap_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_proxy_header_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_proxy_header_sync.py new file mode 100644 index 000000000..728f120d8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_proxy_header_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetProxyHeader +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_SetProxyHeader_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_proxy_header(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetProxyHeaderTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_proxy_header(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetSslProxies_SetProxyHeader_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_ssl_certificates_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_ssl_certificates_sync.py new file mode 100644 index 000000000..33ab802cd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_ssl_certificates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSslCertificates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_SetSslCertificates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_ssl_certificates(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslCertificatesTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_ssl_certificates(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetSslProxies_SetSslCertificates_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_ssl_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_ssl_policy_sync.py new file mode 100644 index 000000000..ea5e40eb3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_ssl_proxies_set_ssl_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSslPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetSslProxies_SetSslPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_ssl_policy(): + # Create a client + client = compute_v1.TargetSslProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetSslPolicyTargetSslProxyRequest( + project="project_value", + target_ssl_proxy="target_ssl_proxy_value", + ) + + # Make the request + response = client.set_ssl_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetSslProxies_SetSslPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_aggregated_list_sync.py new file mode 100644 index 000000000..2e7a23cf6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetTcpProxies_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetTcpProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetTcpProxies_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_delete_sync.py new file mode 100644 index 000000000..0c068668c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetTcpProxies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetTcpProxies_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_get_sync.py new file mode 100644 index 000000000..dcd186eb9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetTcpProxies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetTcpProxies_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_insert_sync.py new file mode 100644 index 000000000..be0edfca6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetTcpProxies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetTcpProxyRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetTcpProxies_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_list_sync.py new file mode 100644 index 000000000..fd984b568 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetTcpProxies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetTcpProxiesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetTcpProxies_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_set_backend_service_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_set_backend_service_sync.py new file mode 100644 index 000000000..26d2f0b42 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_set_backend_service_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetBackendService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetTcpProxies_SetBackendService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_backend_service(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetBackendServiceTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.set_backend_service(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetTcpProxies_SetBackendService_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_set_proxy_header_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_set_proxy_header_sync.py new file mode 100644 index 000000000..c15e97cd7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_tcp_proxies_set_proxy_header_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetProxyHeader +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetTcpProxies_SetProxyHeader_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_proxy_header(): + # Create a client + client = compute_v1.TargetTcpProxiesClient() + + # Initialize request argument(s) + request = compute_v1.SetProxyHeaderTargetTcpProxyRequest( + project="project_value", + target_tcp_proxy="target_tcp_proxy_value", + ) + + # Make the request + response = client.set_proxy_header(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetTcpProxies_SetProxyHeader_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_aggregated_list_sync.py new file mode 100644 index 000000000..435926d57 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetVpnGateways_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListTargetVpnGatewaysRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetVpnGateways_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_delete_sync.py new file mode 100644 index 000000000..1d719e771 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetVpnGateways_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteTargetVpnGatewayRequest( + project="project_value", + region="region_value", + target_vpn_gateway="target_vpn_gateway_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetVpnGateways_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_get_sync.py new file mode 100644 index 000000000..2b94fb16f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetVpnGateways_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.GetTargetVpnGatewayRequest( + project="project_value", + region="region_value", + target_vpn_gateway="target_vpn_gateway_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetVpnGateways_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_insert_sync.py new file mode 100644 index 000000000..b7c2b9024 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetVpnGateways_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertTargetVpnGatewayRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetVpnGateways_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_list_sync.py new file mode 100644 index 000000000..ae092ca8f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetVpnGateways_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.ListTargetVpnGatewaysRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_TargetVpnGateways_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_set_labels_sync.py new file mode 100644 index 000000000..01d05eb94 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_target_vpn_gateways_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_TargetVpnGateways_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.TargetVpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsTargetVpnGatewayRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_TargetVpnGateways_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_aggregated_list_sync.py new file mode 100644 index 000000000..50a001e5b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListUrlMapsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_UrlMaps_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_delete_sync.py new file mode 100644 index 000000000..0891fd82a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_delete_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_UrlMaps_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_get_sync.py new file mode 100644 index 000000000..d30e4773b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.GetUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_UrlMaps_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_insert_sync.py new file mode 100644 index 000000000..dde5029cf --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_insert_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InsertUrlMapRequest( + project="project_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_UrlMaps_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_invalidate_cache_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_invalidate_cache_sync.py new file mode 100644 index 000000000..e7383c982 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_invalidate_cache_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InvalidateCache +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_InvalidateCache_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_invalidate_cache(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.InvalidateCacheUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.invalidate_cache(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_UrlMaps_InvalidateCache_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_list_sync.py new file mode 100644 index 000000000..d17f1614f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.ListUrlMapsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_UrlMaps_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_patch_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_patch_sync.py new file mode 100644 index 000000000..86a826007 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.PatchUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_UrlMaps_Patch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_update_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_update_sync.py new file mode 100644 index 000000000..13addcbb2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_update_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_UrlMaps_Update_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_validate_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_validate_sync.py new file mode 100644 index 000000000..176fab131 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_url_maps_validate_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for Validate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_UrlMaps_Validate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_validate(): + # Create a client + client = compute_v1.UrlMapsClient() + + # Initialize request argument(s) + request = compute_v1.ValidateUrlMapRequest( + project="project_value", + url_map="url_map_value", + ) + + # Make the request + response = client.validate(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_UrlMaps_Validate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_aggregated_list_sync.py new file mode 100644 index 000000000..ceaddbf8d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnGateways_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListVpnGatewaysRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_VpnGateways_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_delete_sync.py new file mode 100644 index 000000000..7c6485e60 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnGateways_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.DeleteVpnGatewayRequest( + project="project_value", + region="region_value", + vpn_gateway="vpn_gateway_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnGateways_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_get_status_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_get_status_sync.py new file mode 100644 index 000000000..93717dc1d --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_get_status_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStatus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnGateways_GetStatus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_status(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.GetStatusVpnGatewayRequest( + project="project_value", + region="region_value", + vpn_gateway="vpn_gateway_value", + ) + + # Make the request + response = client.get_status(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnGateways_GetStatus_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_get_sync.py new file mode 100644 index 000000000..31691f8c8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnGateways_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.GetVpnGatewayRequest( + project="project_value", + region="region_value", + vpn_gateway="vpn_gateway_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnGateways_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_insert_sync.py new file mode 100644 index 000000000..cbff844e9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnGateways_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.InsertVpnGatewayRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnGateways_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_list_sync.py new file mode 100644 index 000000000..63787d58c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnGateways_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.ListVpnGatewaysRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_VpnGateways_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_set_labels_sync.py new file mode 100644 index 000000000..19395945c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnGateways_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsVpnGatewayRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnGateways_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py new file mode 100644 index 000000000..8db5c37e8 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnGateways_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.VpnGatewaysClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsVpnGatewayRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnGateways_TestIamPermissions_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_aggregated_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_aggregated_list_sync.py new file mode 100644 index 000000000..4f2cac38c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnTunnels_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListVpnTunnelsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_VpnTunnels_AggregatedList_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_delete_sync.py new file mode 100644 index 000000000..6653e1ff2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnTunnels_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteVpnTunnelRequest( + project="project_value", + region="region_value", + vpn_tunnel="vpn_tunnel_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnTunnels_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_get_sync.py new file mode 100644 index 000000000..29e967deb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnTunnels_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.GetVpnTunnelRequest( + project="project_value", + region="region_value", + vpn_tunnel="vpn_tunnel_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnTunnels_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_insert_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_insert_sync.py new file mode 100644 index 000000000..b919bfcbc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnTunnels_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.InsertVpnTunnelRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnTunnels_Insert_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_list_sync.py new file mode 100644 index 000000000..3dc755e29 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnTunnels_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.ListVpnTunnelsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_VpnTunnels_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_set_labels_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_set_labels_sync.py new file mode 100644 index 000000000..89c424b38 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_vpn_tunnels_set_labels_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_VpnTunnels_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.VpnTunnelsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsVpnTunnelRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_VpnTunnels_SetLabels_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_delete_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_delete_sync.py new file mode 100644 index 000000000..08f6b66c7 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneOperations_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.ZoneOperationsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteZoneOperationRequest( + operation="operation_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ZoneOperations_Delete_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_get_sync.py new file mode 100644 index 000000000..0120af12b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneOperations_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ZoneOperationsClient() + + # Initialize request argument(s) + request = compute_v1.GetZoneOperationRequest( + operation="operation_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ZoneOperations_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_list_sync.py new file mode 100644 index 000000000..c50e9c041 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneOperations_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.ZoneOperationsClient() + + # Initialize request argument(s) + request = compute_v1.ListZoneOperationsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_ZoneOperations_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_wait_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_wait_sync.py new file mode 100644 index 000000000..985b8d88e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zone_operations_wait_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Wait +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneOperations_Wait_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_wait(): + # Create a client + client = compute_v1.ZoneOperationsClient() + + # Initialize request argument(s) + request = compute_v1.WaitZoneOperationRequest( + operation="operation_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.wait(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_ZoneOperations_Wait_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zones_get_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zones_get_sync.py new file mode 100644 index 000000000..ccdd77e5a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zones_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Zones_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ZonesClient() + + # Initialize request argument(s) + request = compute_v1.GetZoneRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_Zones_Get_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zones_list_sync.py b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zones_list_sync.py new file mode 100644 index 000000000..55acd5971 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/compute_v1_generated_zones_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Zones_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.ZonesClient() + + # Initialize request argument(s) + request = compute_v1.ListZonesRequest( + project="project_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_Zones_List_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json new file mode 100644 index 000000000..ebc934990 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -0,0 +1,60667 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.compute.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-compute", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AcceleratorTypesClient", + "shortName": "AcceleratorTypesClient" + }, + "fullName": "google.cloud.compute_v1.AcceleratorTypesClient.aggregated_list", + "method": { + 
"fullName": "google.cloud.compute.v1.AcceleratorTypes.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.AcceleratorTypes", + "shortName": "AcceleratorTypes" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListAcceleratorTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.accelerator_types.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_accelerator_types_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_AcceleratorTypes_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_accelerator_types_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AcceleratorTypesClient", + "shortName": "AcceleratorTypesClient" + }, + "fullName": "google.cloud.compute_v1.AcceleratorTypesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.AcceleratorTypes.Get", + "service": { + "fullName": "google.cloud.compute.v1.AcceleratorTypes", + "shortName": "AcceleratorTypes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.GetAcceleratorTypeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "accelerator_type", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.AcceleratorType", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_accelerator_types_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_AcceleratorTypes_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_accelerator_types_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AcceleratorTypesClient", + "shortName": "AcceleratorTypesClient" + }, + "fullName": "google.cloud.compute_v1.AcceleratorTypesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.AcceleratorTypes.List", + "service": { + "fullName": "google.cloud.compute.v1.AcceleratorTypes", + "shortName": "AcceleratorTypes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListAcceleratorTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.accelerator_types.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_accelerator_types_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_AcceleratorTypes_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_accelerator_types_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AddressesClient", + "shortName": "AddressesClient" + }, + "fullName": "google.cloud.compute_v1.AddressesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.Addresses.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.Addresses", + "shortName": "Addresses" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListAddressesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.addresses.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_addresses_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Addresses_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_addresses_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AddressesClient", + "shortName": "AddressesClient" + }, + "fullName": "google.cloud.compute_v1.AddressesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Addresses.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Addresses", + "shortName": "Addresses" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "address", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_addresses_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Addresses_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, 
+ "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_addresses_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AddressesClient", + "shortName": "AddressesClient" + }, + "fullName": "google.cloud.compute_v1.AddressesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Addresses.Get", + "service": { + "fullName": "google.cloud.compute.v1.Addresses", + "shortName": "Addresses" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "address", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Address", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_addresses_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Addresses_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_addresses_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AddressesClient", + "shortName": "AddressesClient" + }, + "fullName": "google.cloud.compute_v1.AddressesClient.insert", + "method": { + 
"fullName": "google.cloud.compute.v1.Addresses.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Addresses", + "shortName": "Addresses" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "address_resource", + "type": "google.cloud.compute_v1.types.Address" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_addresses_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Addresses_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_addresses_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AddressesClient", + "shortName": "AddressesClient" + }, + "fullName": "google.cloud.compute_v1.AddressesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Addresses.List", + "service": { + "fullName": "google.cloud.compute.v1.Addresses", + "shortName": "Addresses" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListAddressesRequest" + }, + { + "name": "project", + 
"type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.addresses.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_addresses_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Addresses_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_addresses_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AddressesClient", + "shortName": "AddressesClient" + }, + "fullName": "google.cloud.compute_v1.AddressesClient.move", + "method": { + "fullName": "google.cloud.compute.v1.Addresses.Move", + "service": { + "fullName": "google.cloud.compute.v1.Addresses", + "shortName": "Addresses" + }, + "shortName": "Move" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.MoveAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "address", + "type": "str" + }, + { + "name": "region_addresses_move_request_resource", + "type": "google.cloud.compute_v1.types.RegionAddressesMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + 
} + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "move" + }, + "description": "Sample for Move", + "file": "compute_v1_generated_addresses_move_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Addresses_Move_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_addresses_move_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AddressesClient", + "shortName": "AddressesClient" + }, + "fullName": "google.cloud.compute_v1.AddressesClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.Addresses.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.Addresses", + "shortName": "Addresses" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_addresses_set_labels_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Addresses_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_addresses_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AutoscalersClient", + "shortName": "AutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.AutoscalersClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.Autoscalers.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.Autoscalers", + "shortName": "Autoscalers" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListAutoscalersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.autoscalers.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_autoscalers_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Autoscalers_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_autoscalers_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AutoscalersClient", + "shortName": "AutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.AutoscalersClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Autoscalers.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Autoscalers", + "shortName": "Autoscalers" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "autoscaler", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_autoscalers_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Autoscalers_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_autoscalers_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.AutoscalersClient", + "shortName": "AutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.AutoscalersClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Autoscalers.Get", + "service": { + "fullName": "google.cloud.compute.v1.Autoscalers", + "shortName": "Autoscalers" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "autoscaler", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Autoscaler", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_autoscalers_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Autoscalers_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_autoscalers_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AutoscalersClient", + "shortName": "AutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.AutoscalersClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Autoscalers.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Autoscalers", + "shortName": "Autoscalers" + }, + "shortName": "Insert" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.compute_v1.types.InsertAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": "google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_autoscalers_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Autoscalers_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_autoscalers_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AutoscalersClient", + "shortName": "AutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.AutoscalersClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Autoscalers.List", + "service": { + "fullName": "google.cloud.compute.v1.Autoscalers", + "shortName": "Autoscalers" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListAutoscalersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.autoscalers.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_autoscalers_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Autoscalers_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_autoscalers_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AutoscalersClient", + "shortName": "AutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.AutoscalersClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.Autoscalers.Patch", + "service": { + "fullName": "google.cloud.compute.v1.Autoscalers", + "shortName": "Autoscalers" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": "google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_autoscalers_patch_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "compute_v1_generated_Autoscalers_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_autoscalers_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.AutoscalersClient", + "shortName": "AutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.AutoscalersClient.update", + "method": { + "fullName": "google.cloud.compute.v1.Autoscalers.Update", + "service": { + "fullName": "google.cloud.compute.v1.Autoscalers", + "shortName": "Autoscalers" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": "google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_autoscalers_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Autoscalers_Update_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_autoscalers_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.add_signed_url_key", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.AddSignedUrlKey", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "AddSignedUrlKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddSignedUrlKeyBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "signed_url_key_resource", + "type": "google.cloud.compute_v1.types.SignedUrlKey" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_signed_url_key" + }, + "description": "Sample for AddSignedUrlKey", + "file": "compute_v1_generated_backend_buckets_add_signed_url_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_AddSignedUrlKey_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_add_signed_url_key_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.delete_signed_url_key", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.DeleteSignedUrlKey", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "DeleteSignedUrlKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "key_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_signed_url_key" + }, + "description": "Sample for DeleteSignedUrlKey", + "file": "compute_v1_generated_backend_buckets_delete_signed_url_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_DeleteSignedUrlKey_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_delete_signed_url_key_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.Delete", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_backend_buckets_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.Get", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + 
}, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.BackendBucket", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_backend_buckets_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.Insert", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_bucket_resource", + "type": "google.cloud.compute_v1.types.BackendBucket" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_backend_buckets_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.List", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListBackendBucketsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.backend_buckets.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_backend_buckets_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_BackendBuckets_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.Patch", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "backend_bucket_resource", + "type": "google.cloud.compute_v1.types.BackendBucket" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_backend_buckets_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.set_edge_security_policy", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.SetEdgeSecurityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "SetEdgeSecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetEdgeSecurityPolicyBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "security_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_edge_security_policy" + }, + "description": "Sample for SetEdgeSecurityPolicy", + "file": "compute_v1_generated_backend_buckets_set_edge_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_SetEdgeSecurityPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, 
+ "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_set_edge_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.update", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.Update", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "backend_bucket_resource", + "type": "google.cloud.compute_v1.types.BackendBucket" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_backend_buckets_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_Update_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.add_signed_url_key", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.AddSignedUrlKey", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "AddSignedUrlKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddSignedUrlKeyBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "signed_url_key_resource", + "type": "google.cloud.compute_v1.types.SignedUrlKey" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_signed_url_key" + }, + "description": "Sample for AddSignedUrlKey", + "file": "compute_v1_generated_backend_services_add_signed_url_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_AddSignedUrlKey_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_add_signed_url_key_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": 
"google.cloud.compute_v1.BackendServicesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListBackendServicesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.backend_services.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_backend_services_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.delete_signed_url_key", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.DeleteSignedUrlKey", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": 
"DeleteSignedUrlKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "key_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_signed_url_key" + }, + "description": "Sample for DeleteSignedUrlKey", + "file": "compute_v1_generated_backend_services_delete_signed_url_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_DeleteSignedUrlKey_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_delete_signed_url_key_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.Delete", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": 
"backend_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_backend_services_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.get_health", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.GetHealth", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "GetHealth" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetHealthBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "resource_group_reference_resource", + "type": "google.cloud.compute_v1.types.ResourceGroupReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.BackendServiceGroupHealth", + "shortName": "get_health" + }, + "description": "Sample for GetHealth", + "file": "compute_v1_generated_backend_services_get_health_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_GetHealth_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_get_health_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_backend_services_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.Get", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.BackendService", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_backend_services_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.Insert", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_backend_services_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.List", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListBackendServicesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.backend_services.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_backend_services_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.Patch", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "Patch" + }, + "parameters": [ 
+ { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_backend_services_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.set_edge_security_policy", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.SetEdgeSecurityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "SetEdgeSecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetEdgeSecurityPolicyBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": 
"backend_service", + "type": "str" + }, + { + "name": "security_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_edge_security_policy" + }, + "description": "Sample for SetEdgeSecurityPolicy", + "file": "compute_v1_generated_backend_services_set_edge_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_SetEdgeSecurityPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_set_edge_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": 
"google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_backend_services_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.set_security_policy", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.SetSecurityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "SetSecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSecurityPolicyBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "security_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_security_policy" + }, + "description": "Sample for SetSecurityPolicy", + "file": "compute_v1_generated_backend_services_set_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_SetSecurityPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_set_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendServicesClient", + "shortName": "BackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.BackendServicesClient.update", + "method": { + "fullName": "google.cloud.compute.v1.BackendServices.Update", + "service": { + "fullName": "google.cloud.compute.v1.BackendServices", + "shortName": "BackendServices" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_backend_services_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendServices_Update_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_services_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DiskTypesClient", + "shortName": "DiskTypesClient" + }, + "fullName": "google.cloud.compute_v1.DiskTypesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.DiskTypes.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.DiskTypes", + "shortName": "DiskTypes" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListDiskTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.disk_types.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_disk_types_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_DiskTypes_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + 
"type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disk_types_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DiskTypesClient", + "shortName": "DiskTypesClient" + }, + "fullName": "google.cloud.compute_v1.DiskTypesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.DiskTypes.Get", + "service": { + "fullName": "google.cloud.compute.v1.DiskTypes", + "shortName": "DiskTypes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetDiskTypeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk_type", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.DiskType", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_disk_types_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_DiskTypes_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_disk_types_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DiskTypesClient", + "shortName": "DiskTypesClient" + }, + "fullName": "google.cloud.compute_v1.DiskTypesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.DiskTypes.List", + "service": { + "fullName": "google.cloud.compute.v1.DiskTypes", + "shortName": "DiskTypes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListDiskTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.disk_types.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_disk_types_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_DiskTypes_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disk_types_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.add_resource_policies", + "method": { + "fullName": "google.cloud.compute.v1.Disks.AddResourcePolicies", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + 
"shortName": "Disks" + }, + "shortName": "AddResourcePolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddResourcePoliciesDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "disks_add_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.DisksAddResourcePoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_resource_policies" + }, + "description": "Sample for AddResourcePolicies", + "file": "compute_v1_generated_disks_add_resource_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_AddResourcePolicies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_add_resource_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.Disks.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.AggregatedListDisksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.disks.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_disks_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.bulk_insert", + "method": { + "fullName": "google.cloud.compute.v1.Disks.BulkInsert", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "BulkInsert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.BulkInsertDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "bulk_insert_disk_resource_resource", + "type": "google.cloud.compute_v1.types.BulkInsertDiskResource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "bulk_insert" + }, + "description": "Sample for BulkInsert", + "file": "compute_v1_generated_disks_bulk_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_BulkInsert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_bulk_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.create_snapshot", + "method": { + "fullName": "google.cloud.compute.v1.Disks.CreateSnapshot", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.CreateSnapshotDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "snapshot_resource", + "type": "google.cloud.compute_v1.types.Snapshot" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": 
"compute_v1_generated_disks_create_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_CreateSnapshot_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_create_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Disks.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_disks_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Disks.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_disks_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + 
"shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Disks.Get", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Disk", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_disks_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Disks.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": 
"zone", + "type": "str" + }, + { + "name": "disk_resource", + "type": "google.cloud.compute_v1.types.Disk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_disks_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Disks.List", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListDisksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.disks.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": 
"compute_v1_generated_disks_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.remove_resource_policies", + "method": { + "fullName": "google.cloud.compute.v1.Disks.RemoveResourcePolicies", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "RemoveResourcePolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveResourcePoliciesDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "disks_remove_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.DisksRemoveResourcePoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_resource_policies" + }, + "description": "Sample for RemoveResourcePolicies", + "file": "compute_v1_generated_disks_remove_resource_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Disks_RemoveResourcePolicies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_remove_resource_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.resize", + "method": { + "fullName": "google.cloud.compute.v1.Disks.Resize", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "Resize" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ResizeDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "disks_resize_request_resource", + "type": "google.cloud.compute_v1.types.DisksResizeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "resize" + }, + "description": "Sample for Resize", + "file": "compute_v1_generated_disks_resize_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_Resize_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_resize_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Disks.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "zone_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_disks_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_disks_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.Disks.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "zone_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_disks_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": 
"google.cloud.compute_v1.DisksClient.start_async_replication", + "method": { + "fullName": "google.cloud.compute.v1.Disks.StartAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "StartAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StartAsyncReplicationDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "disks_start_async_replication_request_resource", + "type": "google.cloud.compute_v1.types.DisksStartAsyncReplicationRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "start_async_replication" + }, + "description": "Sample for StartAsyncReplication", + "file": "compute_v1_generated_disks_start_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_StartAsyncReplication_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_start_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.stop_async_replication", + "method": { + "fullName": 
"google.cloud.compute.v1.Disks.StopAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "StopAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StopAsyncReplicationDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "stop_async_replication" + }, + "description": "Sample for StopAsyncReplication", + "file": "compute_v1_generated_disks_stop_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_StopAsyncReplication_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_stop_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.stop_group_async_replication", + "method": { + "fullName": "google.cloud.compute.v1.Disks.StopGroupAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "StopGroupAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.StopGroupAsyncReplicationDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disks_stop_group_async_replication_resource_resource", + "type": "google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "stop_group_async_replication" + }, + "description": "Sample for StopGroupAsyncReplication", + "file": "compute_v1_generated_disks_stop_group_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_StopGroupAsyncReplication_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_stop_group_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Disks.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": 
"zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_disks_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.update", + "method": { + "fullName": "google.cloud.compute.v1.Disks.Update", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "disk_resource", + "type": "google.cloud.compute_v1.types.Disk" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_disks_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient", + "shortName": "ExternalVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways.Delete", + "service": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways", + "shortName": "ExternalVpnGateways" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteExternalVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "external_vpn_gateway", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": 
"compute_v1_generated_external_vpn_gateways_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ExternalVpnGateways_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_external_vpn_gateways_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient", + "shortName": "ExternalVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient.get", + "method": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways.Get", + "service": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways", + "shortName": "ExternalVpnGateways" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetExternalVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "external_vpn_gateway", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.ExternalVpnGateway", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_external_vpn_gateways_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ExternalVpnGateways_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_external_vpn_gateways_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient", + "shortName": "ExternalVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways.Insert", + "service": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways", + "shortName": "ExternalVpnGateways" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertExternalVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "external_vpn_gateway_resource", + "type": "google.cloud.compute_v1.types.ExternalVpnGateway" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_external_vpn_gateways_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ExternalVpnGateways_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_external_vpn_gateways_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient", + "shortName": "ExternalVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient.list", + "method": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways.List", + "service": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways", + "shortName": "ExternalVpnGateways" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListExternalVpnGatewaysRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.external_vpn_gateways.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_external_vpn_gateways_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ExternalVpnGateways_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_external_vpn_gateways_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient", + "shortName": "ExternalVpnGatewaysClient" + }, + "fullName": 
"google.cloud.compute_v1.ExternalVpnGatewaysClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways", + "shortName": "ExternalVpnGateways" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsExternalVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_external_vpn_gateways_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ExternalVpnGateways_SetLabels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_external_vpn_gateways_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient", + "shortName": "ExternalVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.ExternalVpnGatewaysClient.test_iam_permissions", + "method": { + "fullName": 
"google.cloud.compute.v1.ExternalVpnGateways.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.ExternalVpnGateways", + "shortName": "ExternalVpnGateways" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsExternalVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_external_vpn_gateways_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ExternalVpnGateways_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_external_vpn_gateways_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.add_association", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.AddAssociation", + "service": { + 
"fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "AddAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddAssociationFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_association_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyAssociation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_association" + }, + "description": "Sample for AddAssociation", + "file": "compute_v1_generated_firewall_policies_add_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_AddAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_add_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.add_rule", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.AddRule", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "AddRule" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.AddRuleFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_rule" + }, + "description": "Sample for AddRule", + "file": "compute_v1_generated_firewall_policies_add_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_AddRule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_add_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.clone_rules", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.CloneRules", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "CloneRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.CloneRulesFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, 
+ { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "clone_rules" + }, + "description": "Sample for CloneRules", + "file": "compute_v1_generated_firewall_policies_clone_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_CloneRules_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_clone_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_firewall_policies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_FirewallPolicies_Delete_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.get_association", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.GetAssociation", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "GetAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetAssociationFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicyAssociation", + "shortName": "get_association" + }, + "description": "Sample for GetAssociation", + "file": "compute_v1_generated_firewall_policies_get_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_GetAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_get_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyFirewallPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_firewall_policies_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_GetIamPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.get_rule", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.GetRule", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "GetRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRuleFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", + "shortName": "get_rule" + }, + "description": "Sample for GetRule", + "file": "compute_v1_generated_firewall_policies_get_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_GetRule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_get_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.Get", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": 
"Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_firewall_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_Get_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertFirewallPolicyRequest" + }, + { + "name": "parent_id", + "type": "str" + }, + { + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_firewall_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.list_associations", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.ListAssociations", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "ListAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListAssociationsFirewallPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPoliciesListAssociationsResponse", + "shortName": "list_associations" + }, + "description": "Sample for ListAssociations", + "file": "compute_v1_generated_firewall_policies_list_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "compute_v1_generated_FirewallPolicies_ListAssociations_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_list_associations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListFirewallPoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.firewall_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_firewall_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_List_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.move", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.Move", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "Move" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.MoveFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "parent_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "move" + }, + "description": "Sample for Move", + "file": "compute_v1_generated_firewall_policies_move_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_Move_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_move_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": 
"google.cloud.compute_v1.FirewallPoliciesClient.patch_rule", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.PatchRule", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "PatchRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRuleFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_rule" + }, + "description": "Sample for PatchRule", + "file": "compute_v1_generated_firewall_policies_patch_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_PatchRule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_patch_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": 
"FirewallPolicies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_firewall_policies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_Patch_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.remove_association", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.RemoveAssociation", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "RemoveAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveAssociationFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_association" + }, + "description": "Sample for RemoveAssociation", + "file": "compute_v1_generated_firewall_policies_remove_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_RemoveAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_remove_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.remove_rule", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.RemoveRule", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "RemoveRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveRuleFirewallPolicyRequest" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_rule" + }, + 
"description": "Sample for RemoveRule", + "file": "compute_v1_generated_firewall_policies_remove_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_RemoveRule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_remove_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyFirewallPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_organization_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalOrganizationSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_firewall_policies_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_FirewallPolicies_SetIamPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient", + "shortName": "FirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.FirewallPoliciesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.FirewallPolicies.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.FirewallPolicies", + "shortName": "FirewallPolicies" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsFirewallPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_firewall_policies_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_FirewallPolicies_TestIamPermissions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewall_policies_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallsClient", + "shortName": "FirewallsClient" + }, + "fullName": "google.cloud.compute_v1.FirewallsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Firewalls.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Firewalls", + "shortName": "Firewalls" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteFirewallRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_firewalls_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Firewalls_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewalls_delete_sync.py" + }, + { 
+ "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallsClient", + "shortName": "FirewallsClient" + }, + "fullName": "google.cloud.compute_v1.FirewallsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Firewalls.Get", + "service": { + "fullName": "google.cloud.compute.v1.Firewalls", + "shortName": "Firewalls" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetFirewallRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Firewall", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_firewalls_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Firewalls_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewalls_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallsClient", + "shortName": "FirewallsClient" + }, + "fullName": "google.cloud.compute_v1.FirewallsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Firewalls.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Firewalls", + "shortName": "Firewalls" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.compute_v1.types.InsertFirewallRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_resource", + "type": "google.cloud.compute_v1.types.Firewall" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_firewalls_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Firewalls_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewalls_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallsClient", + "shortName": "FirewallsClient" + }, + "fullName": "google.cloud.compute_v1.FirewallsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Firewalls.List", + "service": { + "fullName": "google.cloud.compute.v1.Firewalls", + "shortName": "Firewalls" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListFirewallsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.firewalls.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_firewalls_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Firewalls_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewalls_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallsClient", + "shortName": "FirewallsClient" + }, + "fullName": "google.cloud.compute_v1.FirewallsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.Firewalls.Patch", + "service": { + "fullName": "google.cloud.compute.v1.Firewalls", + "shortName": "Firewalls" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchFirewallRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall", + "type": "str" + }, + { + "name": "firewall_resource", + "type": "google.cloud.compute_v1.types.Firewall" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_firewalls_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Firewalls_Patch_sync", + "segments": [ + { + "end": 
52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewalls_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.FirewallsClient", + "shortName": "FirewallsClient" + }, + "fullName": "google.cloud.compute_v1.FirewallsClient.update", + "method": { + "fullName": "google.cloud.compute.v1.Firewalls.Update", + "service": { + "fullName": "google.cloud.compute.v1.Firewalls", + "shortName": "Firewalls" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateFirewallRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall", + "type": "str" + }, + { + "name": "firewall_resource", + "type": "google.cloud.compute_v1.types.Firewall" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_firewalls_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Firewalls_Update_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_firewalls_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ForwardingRulesClient", + "shortName": "ForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.ForwardingRulesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.ForwardingRules.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.ForwardingRules", + "shortName": "ForwardingRules" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListForwardingRulesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.forwarding_rules.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_forwarding_rules_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ForwardingRules_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_forwarding_rules_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ForwardingRulesClient", + "shortName": "ForwardingRulesClient" 
+ }, + "fullName": "google.cloud.compute_v1.ForwardingRulesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.ForwardingRules.Delete", + "service": { + "fullName": "google.cloud.compute.v1.ForwardingRules", + "shortName": "ForwardingRules" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "forwarding_rule", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_forwarding_rules_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ForwardingRules_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_forwarding_rules_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ForwardingRulesClient", + "shortName": "ForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.ForwardingRulesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.ForwardingRules.Get", + "service": { + "fullName": "google.cloud.compute.v1.ForwardingRules", + "shortName": "ForwardingRules" + }, + "shortName": "Get" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "forwarding_rule", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.ForwardingRule", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_forwarding_rules_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ForwardingRules_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_forwarding_rules_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ForwardingRulesClient", + "shortName": "ForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.ForwardingRulesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.ForwardingRules.Insert", + "service": { + "fullName": "google.cloud.compute.v1.ForwardingRules", + "shortName": "ForwardingRules" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "forwarding_rule_resource", + "type": "google.cloud.compute_v1.types.ForwardingRule" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_forwarding_rules_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ForwardingRules_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_forwarding_rules_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ForwardingRulesClient", + "shortName": "ForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.ForwardingRulesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.ForwardingRules.List", + "service": { + "fullName": "google.cloud.compute.v1.ForwardingRules", + "shortName": "ForwardingRules" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListForwardingRulesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.forwarding_rules.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": 
"compute_v1_generated_forwarding_rules_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ForwardingRules_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_forwarding_rules_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ForwardingRulesClient", + "shortName": "ForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.ForwardingRulesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.ForwardingRules.Patch", + "service": { + "fullName": "google.cloud.compute.v1.ForwardingRules", + "shortName": "ForwardingRules" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "forwarding_rule", + "type": "str" + }, + { + "name": "forwarding_rule_resource", + "type": "google.cloud.compute_v1.types.ForwardingRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_forwarding_rules_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ForwardingRules_Patch_sync", + "segments": [ + { + 
"end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_forwarding_rules_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ForwardingRulesClient", + "shortName": "ForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.ForwardingRulesClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.ForwardingRules.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.ForwardingRules", + "shortName": "ForwardingRules" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_forwarding_rules_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ForwardingRules_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_forwarding_rules_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ForwardingRulesClient", + "shortName": "ForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.ForwardingRulesClient.set_target", + "method": { + "fullName": "google.cloud.compute.v1.ForwardingRules.SetTarget", + "service": { + "fullName": "google.cloud.compute.v1.ForwardingRules", + "shortName": "ForwardingRules" + }, + "shortName": "SetTarget" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetTargetForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "forwarding_rule", + "type": "str" + }, + { + "name": "target_reference_resource", + "type": "google.cloud.compute_v1.types.TargetReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_target" + }, + "description": "Sample for SetTarget", + "file": "compute_v1_generated_forwarding_rules_set_target_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ForwardingRules_SetTarget_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_forwarding_rules_set_target_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalAddressesClient", + "shortName": "GlobalAddressesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalAddressesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.GlobalAddresses.Delete", + "service": { + "fullName": "google.cloud.compute.v1.GlobalAddresses", + "shortName": "GlobalAddresses" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteGlobalAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "address", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_global_addresses_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalAddresses_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_addresses_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalAddressesClient", + "shortName": "GlobalAddressesClient" + }, + 
"fullName": "google.cloud.compute_v1.GlobalAddressesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.GlobalAddresses.Get", + "service": { + "fullName": "google.cloud.compute.v1.GlobalAddresses", + "shortName": "GlobalAddresses" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetGlobalAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "address", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Address", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_global_addresses_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalAddresses_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_addresses_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalAddressesClient", + "shortName": "GlobalAddressesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalAddressesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.GlobalAddresses.Insert", + "service": { + "fullName": "google.cloud.compute.v1.GlobalAddresses", + "shortName": "GlobalAddresses" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.InsertGlobalAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "address_resource", + "type": "google.cloud.compute_v1.types.Address" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_global_addresses_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalAddresses_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_addresses_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalAddressesClient", + "shortName": "GlobalAddressesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalAddressesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.GlobalAddresses.List", + "service": { + "fullName": "google.cloud.compute.v1.GlobalAddresses", + "shortName": "GlobalAddresses" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListGlobalAddressesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.compute_v1.services.global_addresses.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_global_addresses_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalAddresses_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_addresses_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalAddressesClient", + "shortName": "GlobalAddressesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalAddressesClient.move", + "method": { + "fullName": "google.cloud.compute.v1.GlobalAddresses.Move", + "service": { + "fullName": "google.cloud.compute.v1.GlobalAddresses", + "shortName": "GlobalAddresses" + }, + "shortName": "Move" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.MoveGlobalAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "address", + "type": "str" + }, + { + "name": "global_addresses_move_request_resource", + "type": "google.cloud.compute_v1.types.GlobalAddressesMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "move" + }, + "description": "Sample for Move", + "file": "compute_v1_generated_global_addresses_move_sync.py", + "language": "PYTHON", 
+ "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalAddresses_Move_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_addresses_move_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalAddressesClient", + "shortName": "GlobalAddressesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalAddressesClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.GlobalAddresses.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.GlobalAddresses", + "shortName": "GlobalAddresses" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsGlobalAddressRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_global_addresses_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalAddresses_SetLabels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_addresses_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient", + "shortName": "GlobalForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules.Delete", + "service": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules", + "shortName": "GlobalForwardingRules" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteGlobalForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "forwarding_rule", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_global_forwarding_rules_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalForwardingRules_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_forwarding_rules_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient", + "shortName": "GlobalForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules.Get", + "service": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules", + "shortName": "GlobalForwardingRules" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetGlobalForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "forwarding_rule", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.ForwardingRule", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_global_forwarding_rules_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalForwardingRules_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_forwarding_rules_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient", + "shortName": "GlobalForwardingRulesClient" + }, + "fullName": 
"google.cloud.compute_v1.GlobalForwardingRulesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules.Insert", + "service": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules", + "shortName": "GlobalForwardingRules" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertGlobalForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "forwarding_rule_resource", + "type": "google.cloud.compute_v1.types.ForwardingRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_global_forwarding_rules_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalForwardingRules_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_forwarding_rules_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient", + "shortName": "GlobalForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules.List", + "service": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules", + 
"shortName": "GlobalForwardingRules" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListGlobalForwardingRulesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.global_forwarding_rules.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_global_forwarding_rules_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalForwardingRules_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_forwarding_rules_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient", + "shortName": "GlobalForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules.Patch", + "service": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules", + "shortName": "GlobalForwardingRules" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchGlobalForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "forwarding_rule", + "type": "str" + }, + { + "name": 
"forwarding_rule_resource", + "type": "google.cloud.compute_v1.types.ForwardingRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_global_forwarding_rules_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalForwardingRules_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_forwarding_rules_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient", + "shortName": "GlobalForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules", + "shortName": "GlobalForwardingRules" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsGlobalForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" 
+ }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_global_forwarding_rules_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalForwardingRules_SetLabels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_forwarding_rules_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient", + "shortName": "GlobalForwardingRulesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalForwardingRulesClient.set_target", + "method": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules.SetTarget", + "service": { + "fullName": "google.cloud.compute.v1.GlobalForwardingRules", + "shortName": "GlobalForwardingRules" + }, + "shortName": "SetTarget" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetTargetGlobalForwardingRuleRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "forwarding_rule", + "type": "str" + }, + { + "name": "target_reference_resource", + "type": "google.cloud.compute_v1.types.TargetReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_target" + }, + "description": "Sample for SetTarget", + "file": "compute_v1_generated_global_forwarding_rules_set_target_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalForwardingRules_SetTarget_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_forwarding_rules_set_target_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient", + "shortName": "GlobalNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient.attach_network_endpoints", + "method": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups.AttachNetworkEndpoints", + "service": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups", + "shortName": "GlobalNetworkEndpointGroups" + }, + "shortName": "AttachNetworkEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "global_network_endpoint_groups_attach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.GlobalNetworkEndpointGroupsAttachEndpointsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "attach_network_endpoints" + }, + "description": "Sample for AttachNetworkEndpoints", + "file": "compute_v1_generated_global_network_endpoint_groups_attach_network_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalNetworkEndpointGroups_AttachNetworkEndpoints_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_network_endpoint_groups_attach_network_endpoints_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient", + "shortName": "GlobalNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups.Delete", + "service": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups", + "shortName": "GlobalNetworkEndpointGroups" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteGlobalNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + 
"shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_global_network_endpoint_groups_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalNetworkEndpointGroups_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_network_endpoint_groups_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient", + "shortName": "GlobalNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient.detach_network_endpoints", + "method": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups.DetachNetworkEndpoints", + "service": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups", + "shortName": "GlobalNetworkEndpointGroups" + }, + "shortName": "DetachNetworkEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "global_network_endpoint_groups_detach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.GlobalNetworkEndpointGroupsDetachEndpointsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "detach_network_endpoints" + }, + "description": "Sample for DetachNetworkEndpoints", + "file": "compute_v1_generated_global_network_endpoint_groups_detach_network_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalNetworkEndpointGroups_DetachNetworkEndpoints_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_network_endpoint_groups_detach_network_endpoints_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient", + "shortName": "GlobalNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups.Get", + "service": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups", + "shortName": "GlobalNetworkEndpointGroups" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetGlobalNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NetworkEndpointGroup", + "shortName": "get" + }, + "description": "Sample for Get", + "file": 
"compute_v1_generated_global_network_endpoint_groups_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalNetworkEndpointGroups_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_network_endpoint_groups_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient", + "shortName": "GlobalNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups.Insert", + "service": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups", + "shortName": "GlobalNetworkEndpointGroups" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertGlobalNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network_endpoint_group_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_global_network_endpoint_groups_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_GlobalNetworkEndpointGroups_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_network_endpoint_groups_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient", + "shortName": "GlobalNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient.list_network_endpoints", + "method": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups.ListNetworkEndpoints", + "service": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups", + "shortName": "GlobalNetworkEndpointGroups" + }, + "shortName": "ListNetworkEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.global_network_endpoint_groups.pagers.ListNetworkEndpointsPager", + "shortName": "list_network_endpoints" + }, + "description": "Sample for ListNetworkEndpoints", + "file": "compute_v1_generated_global_network_endpoint_groups_list_network_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_GlobalNetworkEndpointGroups_ListNetworkEndpoints_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_network_endpoint_groups_list_network_endpoints_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient", + "shortName": "GlobalNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalNetworkEndpointGroupsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups.List", + "service": { + "fullName": "google.cloud.compute.v1.GlobalNetworkEndpointGroups", + "shortName": "GlobalNetworkEndpointGroups" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListGlobalNetworkEndpointGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.global_network_endpoint_groups.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_global_network_endpoint_groups_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalNetworkEndpointGroups_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_network_endpoint_groups_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalOperationsClient", + "shortName": "GlobalOperationsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalOperationsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.GlobalOperations.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.GlobalOperations", + "shortName": "GlobalOperations" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListGlobalOperationsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.global_operations.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_global_operations_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalOperations_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_global_operations_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalOperationsClient", + "shortName": "GlobalOperationsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalOperationsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.GlobalOperations.Delete", + "service": { + "fullName": "google.cloud.compute.v1.GlobalOperations", + "shortName": "GlobalOperations" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteGlobalOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.DeleteGlobalOperationResponse", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_global_operations_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalOperations_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_operations_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalOperationsClient", + "shortName": "GlobalOperationsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalOperationsClient.get", + "method": { + 
"fullName": "google.cloud.compute.v1.GlobalOperations.Get", + "service": { + "fullName": "google.cloud.compute.v1.GlobalOperations", + "shortName": "GlobalOperations" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetGlobalOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_global_operations_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalOperations_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_operations_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalOperationsClient", + "shortName": "GlobalOperationsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalOperationsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.GlobalOperations.List", + "service": { + "fullName": "google.cloud.compute.v1.GlobalOperations", + "shortName": "GlobalOperations" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListGlobalOperationsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.global_operations.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_global_operations_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalOperations_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_operations_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalOperationsClient", + "shortName": "GlobalOperationsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalOperationsClient.wait", + "method": { + "fullName": "google.cloud.compute.v1.GlobalOperations.Wait", + "service": { + "fullName": "google.cloud.compute.v1.GlobalOperations", + "shortName": "GlobalOperations" + }, + "shortName": "Wait" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.WaitGlobalOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "wait" + }, + "description": "Sample for Wait", + "file": 
"compute_v1_generated_global_operations_wait_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalOperations_Wait_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_operations_wait_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalOrganizationOperationsClient", + "shortName": "GlobalOrganizationOperationsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalOrganizationOperationsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.GlobalOrganizationOperations.Delete", + "service": { + "fullName": "google.cloud.compute.v1.GlobalOrganizationOperations", + "shortName": "GlobalOrganizationOperations" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteGlobalOrganizationOperationRequest" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.DeleteGlobalOrganizationOperationResponse", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_global_organization_operations_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalOrganizationOperations_Delete_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + 
"end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_organization_operations_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalOrganizationOperationsClient", + "shortName": "GlobalOrganizationOperationsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalOrganizationOperationsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.GlobalOrganizationOperations.Get", + "service": { + "fullName": "google.cloud.compute.v1.GlobalOrganizationOperations", + "shortName": "GlobalOrganizationOperations" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetGlobalOrganizationOperationRequest" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_global_organization_operations_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalOrganizationOperations_Get_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_organization_operations_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalOrganizationOperationsClient", + "shortName": "GlobalOrganizationOperationsClient" + }, + "fullName": "google.cloud.compute_v1.GlobalOrganizationOperationsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.GlobalOrganizationOperations.List", + "service": { + "fullName": "google.cloud.compute.v1.GlobalOrganizationOperations", + "shortName": "GlobalOrganizationOperations" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListGlobalOrganizationOperationsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.global_organization_operations.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_global_organization_operations_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalOrganizationOperations_List_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_organization_operations_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient", + "shortName": 
"GlobalPublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes.Delete", + "service": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes", + "shortName": "GlobalPublicDelegatedPrefixes" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteGlobalPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_global_public_delegated_prefixes_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalPublicDelegatedPrefixes_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_public_delegated_prefixes_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient", + "shortName": "GlobalPublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient.get", + "method": { + "fullName": 
"google.cloud.compute.v1.GlobalPublicDelegatedPrefixes.Get", + "service": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes", + "shortName": "GlobalPublicDelegatedPrefixes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetGlobalPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.PublicDelegatedPrefix", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_global_public_delegated_prefixes_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalPublicDelegatedPrefixes_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_public_delegated_prefixes_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient", + "shortName": "GlobalPublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes.Insert", + "service": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes", + "shortName": "GlobalPublicDelegatedPrefixes" + }, + 
"shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertGlobalPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "public_delegated_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_global_public_delegated_prefixes_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalPublicDelegatedPrefixes_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_public_delegated_prefixes_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient", + "shortName": "GlobalPublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes.List", + "service": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes", + "shortName": "GlobalPublicDelegatedPrefixes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.ListGlobalPublicDelegatedPrefixesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.global_public_delegated_prefixes.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_global_public_delegated_prefixes_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalPublicDelegatedPrefixes_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_public_delegated_prefixes_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient", + "shortName": "GlobalPublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.GlobalPublicDelegatedPrefixesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes.Patch", + "service": { + "fullName": "google.cloud.compute.v1.GlobalPublicDelegatedPrefixes", + "shortName": "GlobalPublicDelegatedPrefixes" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchGlobalPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, + { + "name": 
"public_delegated_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_global_public_delegated_prefixes_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_GlobalPublicDelegatedPrefixes_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_global_public_delegated_prefixes_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.HealthChecksClient", + "shortName": "HealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.HealthChecksClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.HealthChecks.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.HealthChecks", + "shortName": "HealthChecks" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListHealthChecksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.health_checks.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_health_checks_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_HealthChecks_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_health_checks_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.HealthChecksClient", + "shortName": "HealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.HealthChecksClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.HealthChecks.Delete", + "service": { + "fullName": "google.cloud.compute.v1.HealthChecks", + "shortName": "HealthChecks" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "health_check", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_health_checks_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_HealthChecks_Delete_sync", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_health_checks_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.HealthChecksClient", + "shortName": "HealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.HealthChecksClient.get", + "method": { + "fullName": "google.cloud.compute.v1.HealthChecks.Get", + "service": { + "fullName": "google.cloud.compute.v1.HealthChecks", + "shortName": "HealthChecks" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "health_check", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.HealthCheck", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_health_checks_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_HealthChecks_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "compute_v1_generated_health_checks_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.HealthChecksClient", + "shortName": "HealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.HealthChecksClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.HealthChecks.Insert", + "service": { + "fullName": "google.cloud.compute.v1.HealthChecks", + "shortName": "HealthChecks" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_health_checks_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_HealthChecks_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_health_checks_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.HealthChecksClient", + "shortName": "HealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.HealthChecksClient.list", + "method": { + "fullName": 
"google.cloud.compute.v1.HealthChecks.List", + "service": { + "fullName": "google.cloud.compute.v1.HealthChecks", + "shortName": "HealthChecks" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListHealthChecksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.health_checks.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_health_checks_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_HealthChecks_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_health_checks_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.HealthChecksClient", + "shortName": "HealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.HealthChecksClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.HealthChecks.Patch", + "service": { + "fullName": "google.cloud.compute.v1.HealthChecks", + "shortName": "HealthChecks" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "health_check", + "type": "str" + }, + { + "name": 
"health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_health_checks_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_HealthChecks_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_health_checks_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.HealthChecksClient", + "shortName": "HealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.HealthChecksClient.update", + "method": { + "fullName": "google.cloud.compute.v1.HealthChecks.Update", + "service": { + "fullName": "google.cloud.compute.v1.HealthChecks", + "shortName": "HealthChecks" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "health_check", + "type": "str" + }, + { + "name": "health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_health_checks_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_HealthChecks_Update_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_health_checks_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImageFamilyViewsClient", + "shortName": "ImageFamilyViewsClient" + }, + "fullName": "google.cloud.compute_v1.ImageFamilyViewsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.ImageFamilyViews.Get", + "service": { + "fullName": "google.cloud.compute.v1.ImageFamilyViews", + "shortName": "ImageFamilyViews" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetImageFamilyViewRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "family", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.ImageFamilyView", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_image_family_views_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ImageFamilyViews_Get_sync", + 
"segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_image_family_views_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Images.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "image", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_images_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_images_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.deprecate", + "method": { + "fullName": "google.cloud.compute.v1.Images.Deprecate", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "Deprecate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeprecateImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "image", + "type": "str" + }, + { + "name": "deprecation_status_resource", + "type": "google.cloud.compute_v1.types.DeprecationStatus" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "deprecate" + }, + "description": "Sample for Deprecate", + "file": "compute_v1_generated_images_deprecate_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_Deprecate_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_deprecate_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.get_from_family", + "method": { + "fullName": 
"google.cloud.compute.v1.Images.GetFromFamily", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "GetFromFamily" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetFromFamilyImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "family", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Image", + "shortName": "get_from_family" + }, + "description": "Sample for GetFromFamily", + "file": "compute_v1_generated_images_get_from_family_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_GetFromFamily_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_get_from_family_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Images.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_images_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Images.Get", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "image", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Image", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_images_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Images_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Images.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "image_resource", + "type": "google.cloud.compute_v1.types.Image" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_images_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + 
"start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Images.List", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListImagesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.images.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_images_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.Images.Patch", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": 
"Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "image", + "type": "str" + }, + { + "name": "image_resource", + "type": "google.cloud.compute_v1.types.Image" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_images_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Images.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + 
}, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_images_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.Images.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": 
"set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_images_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Images_SetLabels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ImagesClient", + "shortName": "ImagesClient" + }, + "fullName": "google.cloud.compute_v1.ImagesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Images.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Images", + "shortName": "Images" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_images_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Images_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_images_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.abandon_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.AbandonInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "AbandonInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_abandon_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersAbandonInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "abandon_instances" + }, + "description": "Sample for AbandonInstances", + "file": "compute_v1_generated_instance_group_managers_abandon_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "compute_v1_generated_InstanceGroupManagers_AbandonInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_abandon_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_instance_group_managers_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 
40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.apply_updates_to_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ApplyUpdatesToInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "ApplyUpdatesToInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ApplyUpdatesToInstancesInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_apply_updates_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersApplyUpdatesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "apply_updates_to_instances" + }, + "description": "Sample for ApplyUpdatesToInstances", + "file": "compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_ApplyUpdatesToInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + 
"type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.create_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.CreateInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "CreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.CreateInstancesInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_create_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersCreateInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "create_instances" + }, + "description": "Sample for CreateInstances", + "file": "compute_v1_generated_instance_group_managers_create_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_CreateInstances_sync", + "segments": 
[ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_create_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.DeleteInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "DeleteInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInstancesInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_delete_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersDeleteInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_instances" + }, + "description": "Sample for DeleteInstances", + "file": "compute_v1_generated_instance_group_managers_delete_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_InstanceGroupManagers_DeleteInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_delete_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete_per_instance_configs", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.DeletePerInstanceConfigs", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "DeletePerInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeletePerInstanceConfigsInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_delete_per_instance_configs_req_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersDeletePerInstanceConfigsReq" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_per_instance_configs" + }, + "description": "Sample for DeletePerInstanceConfigs", + "file": 
"compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_DeletePerInstanceConfigs_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Delete", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_instance_group_managers_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_InstanceGroupManagers_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.get", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Get", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InstanceGroupManager", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_instance_group_managers_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + 
}, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Insert", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManager" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_instance_group_managers_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 
50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_errors", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListErrors", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "ListErrors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListErrorsInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListErrorsPager", + "shortName": "list_errors" + }, + "description": "Sample for ListErrors", + "file": "compute_v1_generated_instance_group_managers_list_errors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_ListErrors_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_list_errors_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_managed_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListManagedInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "ListManagedInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListManagedInstancesInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListManagedInstancesPager", + "shortName": "list_managed_instances" + }, + "description": "Sample for ListManagedInstances", + "file": "compute_v1_generated_instance_group_managers_list_managed_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_ListManagedInstances_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_list_managed_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_per_instance_configs", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListPerInstanceConfigs", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "ListPerInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListPerInstanceConfigsInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListPerInstanceConfigsPager", + "shortName": "list_per_instance_configs" + }, + "description": "Sample for ListPerInstanceConfigs", + "file": "compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_ListPerInstanceConfigs_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.List", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_instance_group_managers_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.patch_per_instance_configs", + "method": { + "fullName": 
"google.cloud.compute.v1.InstanceGroupManagers.PatchPerInstanceConfigs", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "PatchPerInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchPerInstanceConfigsInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_patch_per_instance_configs_req_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersPatchPerInstanceConfigsReq" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_per_instance_configs" + }, + "description": "Sample for PatchPerInstanceConfigs", + "file": "compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_PatchPerInstanceConfigs_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + 
"fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Patch", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_manager_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManager" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_instance_group_managers_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.recreate_instances", + "method": { + "fullName": 
"google.cloud.compute.v1.InstanceGroupManagers.RecreateInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "RecreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RecreateInstancesInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_recreate_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersRecreateInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "recreate_instances" + }, + "description": "Sample for RecreateInstances", + "file": "compute_v1_generated_instance_group_managers_recreate_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_RecreateInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_recreate_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": 
"google.cloud.compute_v1.InstanceGroupManagersClient.resize", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Resize", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "Resize" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ResizeInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "size", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "resize" + }, + "description": "Sample for Resize", + "file": "compute_v1_generated_instance_group_managers_resize_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Resize_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_resize_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.set_instance_template", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.SetInstanceTemplate", + 
"service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "SetInstanceTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetInstanceTemplateInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_set_instance_template_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersSetInstanceTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_instance_template" + }, + "description": "Sample for SetInstanceTemplate", + "file": "compute_v1_generated_instance_group_managers_set_instance_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_SetInstanceTemplate_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_set_instance_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.set_target_pools", + "method": { + "fullName": 
"google.cloud.compute.v1.InstanceGroupManagers.SetTargetPools", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "SetTargetPools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetTargetPoolsInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_set_target_pools_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersSetTargetPoolsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_target_pools" + }, + "description": "Sample for SetTargetPools", + "file": "compute_v1_generated_instance_group_managers_set_target_pools_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_SetTargetPools_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_set_target_pools_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": 
"google.cloud.compute_v1.InstanceGroupManagersClient.update_per_instance_configs", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.UpdatePerInstanceConfigs", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "UpdatePerInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdatePerInstanceConfigsInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_update_per_instance_configs_req_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersUpdatePerInstanceConfigsReq" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_per_instance_configs" + }, + "description": "Sample for UpdatePerInstanceConfigs", + "file": "compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_UpdatePerInstanceConfigs_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.add_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.AddInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "AddInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddInstancesInstanceGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "instance_groups_add_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupsAddInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_instances" + }, + "description": "Sample for AddInstances", + "file": "compute_v1_generated_instance_groups_add_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_AddInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_add_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, 
+ "fullName": "google.cloud.compute_v1.InstanceGroupsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListInstanceGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_groups.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_instance_groups_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.Delete", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "Delete" + }, + "parameters": [ 
+ { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInstanceGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_instance_groups_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.Get", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInstanceGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InstanceGroup", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_instance_groups_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.Insert", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertInstanceGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_resource", + "type": "google.cloud.compute_v1.types.InstanceGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": 
"compute_v1_generated_instance_groups_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.list_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.ListInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInstancesInstanceGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "instance_groups_list_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_groups.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "compute_v1_generated_instance_groups_list_instances_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_ListInstances_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.List", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInstanceGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_groups.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_instance_groups_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.remove_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.RemoveInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "RemoveInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveInstancesInstanceGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "instance_groups_remove_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupsRemoveInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_instances" + }, + "description": "Sample for RemoveInstances", + "file": "compute_v1_generated_instance_groups_remove_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_RemoveInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_remove_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.set_named_ports", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroups.SetNamedPorts", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" + }, + "shortName": "SetNamedPorts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetNamedPortsInstanceGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "instance_groups_set_named_ports_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupsSetNamedPortsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_named_ports" + }, + "description": "Sample for SetNamedPorts", + "file": "compute_v1_generated_instance_groups_set_named_ports_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroups_SetNamedPorts_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_groups_set_named_ports_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListInstanceTemplatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_templates.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_instance_templates_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": 
"InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.Delete", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "instance_template", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_instance_templates_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + 
"shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_instance_templates_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.Get", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "instance_template", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InstanceTemplate", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_instance_templates_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.Insert", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "instance_template_resource", + "type": "google.cloud.compute_v1.types.InstanceTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for 
Insert", + "file": "compute_v1_generated_instance_templates_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.List", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInstanceTemplatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_templates.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_instance_templates_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_instance_templates_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + 
}, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_instance_templates_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_instance_templates_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.add_access_config", + "method": { + "fullName": "google.cloud.compute.v1.Instances.AddAccessConfig", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "AddAccessConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddAccessConfigInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "network_interface", + "type": "str" + }, + { + "name": "access_config_resource", + "type": "google.cloud.compute_v1.types.AccessConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_access_config" + }, + "description": "Sample for AddAccessConfig", + "file": "compute_v1_generated_instances_add_access_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_AddAccessConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_add_access_config_sync.py" + }, + { + "canonical": true, + "clientMethod": 
{ + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.add_resource_policies", + "method": { + "fullName": "google.cloud.compute.v1.Instances.AddResourcePolicies", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "AddResourcePolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddResourcePoliciesInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_add_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.InstancesAddResourcePoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_resource_policies" + }, + "description": "Sample for AddResourcePolicies", + "file": "compute_v1_generated_instances_add_resource_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_AddResourcePolicies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_add_resource_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": 
"InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.Instances.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListInstancesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instances.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_instances_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.attach_disk", + "method": { + "fullName": "google.cloud.compute.v1.Instances.AttachDisk", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "AttachDisk" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.AttachDiskInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "attached_disk_resource", + "type": "google.cloud.compute_v1.types.AttachedDisk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "attach_disk" + }, + "description": "Sample for AttachDisk", + "file": "compute_v1_generated_instances_attach_disk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_AttachDisk_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_attach_disk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.bulk_insert", + "method": { + "fullName": "google.cloud.compute.v1.Instances.BulkInsert", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "BulkInsert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.BulkInsertInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "bulk_insert_instance_resource_resource", + "type": 
"google.cloud.compute_v1.types.BulkInsertInstanceResource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "bulk_insert" + }, + "description": "Sample for BulkInsert", + "file": "compute_v1_generated_instances_bulk_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_BulkInsert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_bulk_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.delete_access_config", + "method": { + "fullName": "google.cloud.compute.v1.Instances.DeleteAccessConfig", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "DeleteAccessConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteAccessConfigInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "access_config", + "type": "str" + }, + { + "name": "network_interface", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + 
{ + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_access_config" + }, + "description": "Sample for DeleteAccessConfig", + "file": "compute_v1_generated_instances_delete_access_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_DeleteAccessConfig_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_delete_access_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_instances_delete_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.detach_disk", + "method": { + "fullName": "google.cloud.compute.v1.Instances.DetachDisk", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "DetachDisk" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DetachDiskInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "device_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "detach_disk" + }, + "description": "Sample for DetachDisk", + "file": "compute_v1_generated_instances_detach_disk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_DetachDisk_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_detach_disk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.get_effective_firewalls", + "method": { + "fullName": "google.cloud.compute.v1.Instances.GetEffectiveFirewalls", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "GetEffectiveFirewalls" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetEffectiveFirewallsInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "network_interface", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InstancesGetEffectiveFirewallsResponse", + "shortName": "get_effective_firewalls" + }, + "description": "Sample for GetEffectiveFirewalls", + "file": "compute_v1_generated_instances_get_effective_firewalls_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_GetEffectiveFirewalls_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_get_effective_firewalls_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.get_guest_attributes", + "method": { + "fullName": "google.cloud.compute.v1.Instances.GetGuestAttributes", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "GetGuestAttributes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetGuestAttributesInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.GuestAttributes", + "shortName": "get_guest_attributes" + }, + "description": "Sample for GetGuestAttributes", + "file": "compute_v1_generated_instances_get_guest_attributes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_GetGuestAttributes_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_get_guest_attributes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Instances.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_instances_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.get_screenshot", + "method": { + "fullName": "google.cloud.compute.v1.Instances.GetScreenshot", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": 
"Instances" + }, + "shortName": "GetScreenshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetScreenshotInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Screenshot", + "shortName": "get_screenshot" + }, + "description": "Sample for GetScreenshot", + "file": "compute_v1_generated_instances_get_screenshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_GetScreenshot_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_get_screenshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.get_serial_port_output", + "method": { + "fullName": "google.cloud.compute.v1.Instances.GetSerialPortOutput", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "GetSerialPortOutput" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetSerialPortOutputInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": 
"instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SerialPortOutput", + "shortName": "get_serial_port_output" + }, + "description": "Sample for GetSerialPortOutput", + "file": "compute_v1_generated_instances_get_serial_port_output_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_GetSerialPortOutput_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_get_serial_port_output_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.get_shielded_instance_identity", + "method": { + "fullName": "google.cloud.compute.v1.Instances.GetShieldedInstanceIdentity", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "GetShieldedInstanceIdentity" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetShieldedInstanceIdentityInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.cloud.compute_v1.types.ShieldedInstanceIdentity", + "shortName": "get_shielded_instance_identity" + }, + "description": "Sample for GetShieldedInstanceIdentity", + "file": "compute_v1_generated_instances_get_shielded_instance_identity_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_GetShieldedInstanceIdentity_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_get_shielded_instance_identity_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Get", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Instance", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_instances_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Instances_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_resource", + "type": "google.cloud.compute_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_instances_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.list_referrers", + "method": { + "fullName": "google.cloud.compute.v1.Instances.ListReferrers", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "ListReferrers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListReferrersInstancesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instances.pagers.ListReferrersPager", + "shortName": "list_referrers" + }, + "description": "Sample for ListReferrers", + "file": "compute_v1_generated_instances_list_referrers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_ListReferrers_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_list_referrers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Instances.List", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInstancesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instances.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_instances_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.remove_resource_policies", + "method": { + "fullName": "google.cloud.compute.v1.Instances.RemoveResourcePolicies", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "RemoveResourcePolicies" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.compute_v1.types.RemoveResourcePoliciesInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_remove_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.InstancesRemoveResourcePoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_resource_policies" + }, + "description": "Sample for RemoveResourcePolicies", + "file": "compute_v1_generated_instances_remove_resource_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_RemoveResourcePolicies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_remove_resource_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.reset", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Reset", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Reset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ResetInstanceRequest" + }, + { + "name": 
"project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "reset" + }, + "description": "Sample for Reset", + "file": "compute_v1_generated_instances_reset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_Reset_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_reset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.resume", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Resume", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Resume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ResumeInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "resume" + }, + "description": "Sample for Resume", + "file": "compute_v1_generated_instances_resume_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_Resume_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_resume_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.send_diagnostic_interrupt", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SendDiagnosticInterrupt", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SendDiagnosticInterrupt" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SendDiagnosticInterruptInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SendDiagnosticInterruptInstanceResponse", + "shortName": "send_diagnostic_interrupt" + }, + "description": "Sample for SendDiagnosticInterrupt", + "file": "compute_v1_generated_instances_send_diagnostic_interrupt_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SendDiagnosticInterrupt_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_send_diagnostic_interrupt_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_deletion_protection", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetDeletionProtection", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetDeletionProtection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetDeletionProtectionInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_deletion_protection" + }, + "description": "Sample for SetDeletionProtection", + "file": "compute_v1_generated_instances_set_deletion_protection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetDeletionProtection_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + 
}, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_deletion_protection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_disk_auto_delete", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetDiskAutoDelete", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetDiskAutoDelete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetDiskAutoDeleteInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "auto_delete", + "type": "bool" + }, + { + "name": "device_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_disk_auto_delete" + }, + "description": "Sample for SetDiskAutoDelete", + "file": "compute_v1_generated_instances_set_disk_auto_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetDiskAutoDelete_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_disk_auto_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "zone_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_instances_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_instances_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.InstancesSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_instances_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": 
"InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_machine_resources", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetMachineResources", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetMachineResources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetMachineResourcesInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_set_machine_resources_request_resource", + "type": "google.cloud.compute_v1.types.InstancesSetMachineResourcesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_machine_resources" + }, + "description": "Sample for SetMachineResources", + "file": "compute_v1_generated_instances_set_machine_resources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetMachineResources_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_machine_resources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": 
"google.cloud.compute_v1.InstancesClient.set_machine_type", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetMachineType", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetMachineType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetMachineTypeInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_set_machine_type_request_resource", + "type": "google.cloud.compute_v1.types.InstancesSetMachineTypeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_machine_type" + }, + "description": "Sample for SetMachineType", + "file": "compute_v1_generated_instances_set_machine_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetMachineType_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_machine_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_metadata", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetMetadata", + 
"service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetMetadataInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "metadata_resource", + "type": "google.cloud.compute_v1.types.Metadata" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_metadata" + }, + "description": "Sample for SetMetadata", + "file": "compute_v1_generated_instances_set_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetMetadata_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_metadata_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_min_cpu_platform", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetMinCpuPlatform", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetMinCpuPlatform" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.SetMinCpuPlatformInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_set_min_cpu_platform_request_resource", + "type": "google.cloud.compute_v1.types.InstancesSetMinCpuPlatformRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_min_cpu_platform" + }, + "description": "Sample for SetMinCpuPlatform", + "file": "compute_v1_generated_instances_set_min_cpu_platform_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetMinCpuPlatform_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_min_cpu_platform_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_name", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetName", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetName" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetNameInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": 
"str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_set_name_request_resource", + "type": "google.cloud.compute_v1.types.InstancesSetNameRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_name" + }, + "description": "Sample for SetName", + "file": "compute_v1_generated_instances_set_name_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetName_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_name_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_scheduling", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetScheduling", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetScheduling" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSchedulingInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "scheduling_resource", + "type": "google.cloud.compute_v1.types.Scheduling" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_scheduling" + }, + "description": "Sample for SetScheduling", + "file": "compute_v1_generated_instances_set_scheduling_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetScheduling_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_scheduling_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_service_account", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetServiceAccount", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetServiceAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetServiceAccountInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_set_service_account_request_resource", + "type": "google.cloud.compute_v1.types.InstancesSetServiceAccountRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_service_account" + }, + "description": "Sample for SetServiceAccount", + "file": "compute_v1_generated_instances_set_service_account_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetServiceAccount_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_service_account_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_shielded_instance_integrity_policy", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetShieldedInstanceIntegrityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetShieldedInstanceIntegrityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetShieldedInstanceIntegrityPolicyInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "shielded_instance_integrity_policy_resource", + "type": "google.cloud.compute_v1.types.ShieldedInstanceIntegrityPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_shielded_instance_integrity_policy" + }, + "description": "Sample for SetShieldedInstanceIntegrityPolicy", + "file": "compute_v1_generated_instances_set_shielded_instance_integrity_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetShieldedInstanceIntegrityPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_shielded_instance_integrity_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.set_tags", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SetTags", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SetTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetTagsInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "tags_resource", + "type": "google.cloud.compute_v1.types.Tags" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_tags" + }, + "description": 
"Sample for SetTags", + "file": "compute_v1_generated_instances_set_tags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SetTags_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_set_tags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.simulate_maintenance_event", + "method": { + "fullName": "google.cloud.compute.v1.Instances.SimulateMaintenanceEvent", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "SimulateMaintenanceEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SimulateMaintenanceEventInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "simulate_maintenance_event" + }, + "description": "Sample for SimulateMaintenanceEvent", + "file": "compute_v1_generated_instances_simulate_maintenance_event_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_SimulateMaintenanceEvent_sync", + 
"segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_simulate_maintenance_event_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.start_with_encryption_key", + "method": { + "fullName": "google.cloud.compute.v1.Instances.StartWithEncryptionKey", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "StartWithEncryptionKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StartWithEncryptionKeyInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instances_start_with_encryption_key_request_resource", + "type": "google.cloud.compute_v1.types.InstancesStartWithEncryptionKeyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "start_with_encryption_key" + }, + "description": "Sample for StartWithEncryptionKey", + "file": "compute_v1_generated_instances_start_with_encryption_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_StartWithEncryptionKey_sync", + "segments": [ + { + "end": 53, + "start": 27, + 
"type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_start_with_encryption_key_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.start", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Start", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Start" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StartInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "start" + }, + "description": "Sample for Start", + "file": "compute_v1_generated_instances_start_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_Start_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "compute_v1_generated_instances_start_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.stop", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Stop", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Stop" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StopInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "stop" + }, + "description": "Sample for Stop", + "file": "compute_v1_generated_instances_stop_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_Stop_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_stop_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.suspend", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Suspend", + "service": { + 
"fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Suspend" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SuspendInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "suspend" + }, + "description": "Sample for Suspend", + "file": "compute_v1_generated_instances_suspend_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_Suspend_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_suspend_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Instances.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" 
+ }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_instances_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.update_access_config", + "method": { + "fullName": "google.cloud.compute.v1.Instances.UpdateAccessConfig", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "UpdateAccessConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateAccessConfigInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "network_interface", + "type": "str" + }, + { + "name": 
"access_config_resource", + "type": "google.cloud.compute_v1.types.AccessConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_access_config" + }, + "description": "Sample for UpdateAccessConfig", + "file": "compute_v1_generated_instances_update_access_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_UpdateAccessConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_update_access_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.update_display_device", + "method": { + "fullName": "google.cloud.compute.v1.Instances.UpdateDisplayDevice", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "UpdateDisplayDevice" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateDisplayDeviceInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "display_device_resource", + "type": "google.cloud.compute_v1.types.DisplayDevice" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_display_device" + }, + "description": "Sample for UpdateDisplayDevice", + "file": "compute_v1_generated_instances_update_display_device_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_UpdateDisplayDevice_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_update_display_device_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.update_network_interface", + "method": { + "fullName": "google.cloud.compute.v1.Instances.UpdateNetworkInterface", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "UpdateNetworkInterface" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateNetworkInterfaceInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "network_interface", + "type": "str" + }, + { + "name": "network_interface_resource", + "type": "google.cloud.compute_v1.types.NetworkInterface" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_network_interface" + }, + "description": "Sample for UpdateNetworkInterface", + "file": "compute_v1_generated_instances_update_network_interface_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_UpdateNetworkInterface_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_update_network_interface_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.update_shielded_instance_config", + "method": { + "fullName": "google.cloud.compute.v1.Instances.UpdateShieldedInstanceConfig", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "UpdateShieldedInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateShieldedInstanceConfigInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "shielded_instance_config_resource", + "type": "google.cloud.compute_v1.types.ShieldedInstanceConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_shielded_instance_config" + }, + "description": "Sample for UpdateShieldedInstanceConfig", + "file": "compute_v1_generated_instances_update_shielded_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_UpdateShieldedInstanceConfig_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_update_shielded_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstancesClient", + "shortName": "InstancesClient" + }, + "fullName": "google.cloud.compute_v1.InstancesClient.update", + "method": { + "fullName": "google.cloud.compute.v1.Instances.Update", + "service": { + "fullName": "google.cloud.compute.v1.Instances", + "shortName": "Instances" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance", + "type": "str" + }, + { + "name": "instance_resource", + "type": "google.cloud.compute_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": 
"Sample for Update", + "file": "compute_v1_generated_instances_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Instances_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instances_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListInterconnectAttachmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.interconnect_attachments.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_interconnect_attachments_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectAttachments_AggregatedList_sync", + "segments": [ + { + "end": 
52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_attachments_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments.Delete", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInterconnectAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "interconnect_attachment", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_interconnect_attachments_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectAttachments_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_attachments_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments.Get", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInterconnectAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "interconnect_attachment", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InterconnectAttachment", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_interconnect_attachments_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectAttachments_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_interconnect_attachments_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments.Insert", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertInterconnectAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "interconnect_attachment_resource", + "type": "google.cloud.compute_v1.types.InterconnectAttachment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_interconnect_attachments_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectAttachments_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_attachments_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments.List", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInterconnectAttachmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.interconnect_attachments.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_interconnect_attachments_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectAttachments_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_attachments_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.patch", + "method": { + "fullName": 
"google.cloud.compute.v1.InterconnectAttachments.Patch", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchInterconnectAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "interconnect_attachment", + "type": "str" + }, + { + "name": "interconnect_attachment_resource", + "type": "google.cloud.compute_v1.types.InterconnectAttachment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_interconnect_attachments_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectAttachments_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_attachments_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectAttachments.SetLabels", + "service": { + 
"fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsInterconnectAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_interconnect_attachments_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectAttachments_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_attachments_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectLocationsClient", + "shortName": "InterconnectLocationsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectLocationsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectLocations.Get", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectLocations", + "shortName": 
"InterconnectLocations" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInterconnectLocationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "interconnect_location", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InterconnectLocation", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_interconnect_locations_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectLocations_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_locations_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectLocationsClient", + "shortName": "InterconnectLocationsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectLocationsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectLocations.List", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectLocations", + "shortName": "InterconnectLocations" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInterconnectLocationsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.interconnect_locations.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_interconnect_locations_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectLocations_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_locations_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient", + "shortName": "InterconnectRemoteLocationsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations.Get", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations", + "shortName": "InterconnectRemoteLocations" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInterconnectRemoteLocationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "interconnect_remote_location", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InterconnectRemoteLocation", + "shortName": "get" + }, + 
"description": "Sample for Get", + "file": "compute_v1_generated_interconnect_remote_locations_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectRemoteLocations_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_remote_locations_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient", + "shortName": "InterconnectRemoteLocationsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations.List", + "service": { + "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations", + "shortName": "InterconnectRemoteLocations" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInterconnectRemoteLocationsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.interconnect_remote_locations.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_interconnect_remote_locations_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InterconnectRemoteLocations_List_sync", + "segments": [ + { + 
"end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnect_remote_locations_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInterconnectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "interconnect", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_interconnects_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.get_diagnostics", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.GetDiagnostics", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "GetDiagnostics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetDiagnosticsInterconnectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "interconnect", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InterconnectsGetDiagnosticsResponse", + "shortName": "get_diagnostics" + }, + "description": "Sample for GetDiagnostics", + "file": "compute_v1_generated_interconnects_get_diagnostics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_GetDiagnostics_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_get_diagnostics_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + 
"fullName": "google.cloud.compute_v1.InterconnectsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.Get", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInterconnectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "interconnect", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Interconnect", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_interconnects_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertInterconnectRequest" + }, + { + 
"name": "project", + "type": "str" + }, + { + "name": "interconnect_resource", + "type": "google.cloud.compute_v1.types.Interconnect" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_interconnects_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.List", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInterconnectsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.interconnects.pagers.ListPager", + 
"shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_interconnects_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.Patch", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchInterconnectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "interconnect", + "type": "str" + }, + { + "name": "interconnect_resource", + "type": "google.cloud.compute_v1.types.Interconnect" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_interconnects_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_Patch_sync", + "segments": [ + { + "end": 52, + 
"start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsInterconnectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_interconnects_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_SetLabels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicenseCodesClient", + "shortName": "LicenseCodesClient" + }, + "fullName": "google.cloud.compute_v1.LicenseCodesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.LicenseCodes.Get", + "service": { + "fullName": "google.cloud.compute.v1.LicenseCodes", + "shortName": "LicenseCodes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetLicenseCodeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "license_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.LicenseCode", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_license_codes_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_LicenseCodes_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_license_codes_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicenseCodesClient", + "shortName": "LicenseCodesClient" + }, + 
"fullName": "google.cloud.compute_v1.LicenseCodesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.LicenseCodes.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.LicenseCodes", + "shortName": "LicenseCodes" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsLicenseCodeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_license_codes_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_LicenseCodes_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_license_codes_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" + }, + "fullName": "google.cloud.compute_v1.LicensesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Licenses.Delete", + "service": 
{ + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteLicenseRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "license_", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_licenses_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Licenses_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_licenses_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" + }, + "fullName": "google.cloud.compute_v1.LicensesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Licenses.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyLicenseRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_licenses_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Licenses_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_licenses_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" + }, + "fullName": "google.cloud.compute_v1.LicensesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Licenses.Get", + "service": { + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetLicenseRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "license_", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.License", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_licenses_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Licenses_Get_sync", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_licenses_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" + }, + "fullName": "google.cloud.compute_v1.LicensesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Licenses.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertLicenseRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "license_resource", + "type": "google.cloud.compute_v1.types.License" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_licenses_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Licenses_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_licenses_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" + }, + "fullName": "google.cloud.compute_v1.LicensesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Licenses.List", + "service": { + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListLicensesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.licenses.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_licenses_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Licenses_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_licenses_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" + }, + "fullName": "google.cloud.compute_v1.LicensesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Licenses.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": 
"Licenses" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyLicenseRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_licenses_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Licenses_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_licenses_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" + }, + "fullName": "google.cloud.compute_v1.LicensesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Licenses.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsLicenseRequest" + }, + { + "name": "project", + "type": "str" + }, + { + 
"name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_licenses_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Licenses_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_licenses_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" + }, + "fullName": "google.cloud.compute_v1.MachineImagesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.MachineImages.Delete", + "service": { + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteMachineImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "machine_image", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_machine_images_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_MachineImages_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_images_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" + }, + "fullName": "google.cloud.compute_v1.MachineImagesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.MachineImages.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyMachineImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_machine_images_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_MachineImages_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_images_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" + }, + "fullName": "google.cloud.compute_v1.MachineImagesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.MachineImages.Get", + "service": { + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetMachineImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "machine_image", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.MachineImage", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_machine_images_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_MachineImages_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" 
+ }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_images_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" + }, + "fullName": "google.cloud.compute_v1.MachineImagesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.MachineImages.Insert", + "service": { + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertMachineImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "machine_image_resource", + "type": "google.cloud.compute_v1.types.MachineImage" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_machine_images_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_MachineImages_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_images_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" + }, + 
"fullName": "google.cloud.compute_v1.MachineImagesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.MachineImages.List", + "service": { + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListMachineImagesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.machine_images.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_machine_images_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_MachineImages_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_images_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" + }, + "fullName": "google.cloud.compute_v1.MachineImagesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.MachineImages.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyMachineImageRequest" + 
}, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_machine_images_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_MachineImages_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_images_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" + }, + "fullName": "google.cloud.compute_v1.MachineImagesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.MachineImages.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsMachineImageRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": 
"google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_machine_images_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_MachineImages_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_images_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineTypesClient", + "shortName": "MachineTypesClient" + }, + "fullName": "google.cloud.compute_v1.MachineTypesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.MachineTypes.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.MachineTypes", + "shortName": "MachineTypes" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListMachineTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.machine_types.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_machine_types_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_MachineTypes_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_types_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineTypesClient", + "shortName": "MachineTypesClient" + }, + "fullName": "google.cloud.compute_v1.MachineTypesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.MachineTypes.Get", + "service": { + "fullName": "google.cloud.compute.v1.MachineTypes", + "shortName": "MachineTypes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetMachineTypeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "machine_type", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.MachineType", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_machine_types_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_MachineTypes_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_types_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.MachineTypesClient", + "shortName": "MachineTypesClient" + }, + "fullName": "google.cloud.compute_v1.MachineTypesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.MachineTypes.List", + "service": { + "fullName": "google.cloud.compute.v1.MachineTypes", + "shortName": "MachineTypes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListMachineTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.machine_types.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_machine_types_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_MachineTypes_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { 
+ "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_machine_types_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListNetworkAttachmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_attachments.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_network_attachments_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkAttachments_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", 
+ "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.Delete", + "service": { + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteNetworkAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_attachment", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_network_attachments_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkAttachments_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.GetIamPolicy", + "service": { + "fullName": 
"google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyNetworkAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_network_attachments_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkAttachments_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.Get", + "service": { + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNetworkAttachmentRequest" + }, + { + 
"name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_attachment", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NetworkAttachment", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_network_attachments_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkAttachments_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.Insert", + "service": { + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertNetworkAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_attachment_resource", + "type": "google.cloud.compute_v1.types.NetworkAttachment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_network_attachments_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkAttachments_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.List", + "service": { + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNetworkAttachmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_attachments.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": 
"compute_v1_generated_network_attachments_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkAttachments_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyNetworkAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_network_attachments_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "compute_v1_generated_NetworkAttachments_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.NetworkAttachments.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_network_attachments_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_NetworkAttachments_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_attachments_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListNetworkEdgeSecurityServicesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_edge_security_services.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_network_edge_security_services_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_edge_security_services_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Delete", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteNetworkEdgeSecurityServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_edge_security_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_network_edge_security_services_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_edge_security_services_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Get", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNetworkEdgeSecurityServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_edge_security_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NetworkEdgeSecurityService", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_network_edge_security_services_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "compute_v1_generated_network_edge_security_services_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Insert", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertNetworkEdgeSecurityServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_edge_security_service_resource", + "type": "google.cloud.compute_v1.types.NetworkEdgeSecurityService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_network_edge_security_services_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_edge_security_services_insert_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Patch", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchNetworkEdgeSecurityServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_edge_security_service", + "type": "str" + }, + { + "name": "network_edge_security_service_resource", + "type": "google.cloud.compute_v1.types.NetworkEdgeSecurityService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_network_edge_security_services_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_edge_security_services_patch_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListNetworkEndpointGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": 
"google.cloud.compute_v1.NetworkEndpointGroupsClient.attach_network_endpoints", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.AttachNetworkEndpoints", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" + }, + "shortName": "AttachNetworkEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AttachNetworkEndpointsNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "network_endpoint_groups_attach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroupsAttachEndpointsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "attach_network_endpoints" + }, + "description": "Sample for AttachNetworkEndpoints", + "file": "compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_AttachNetworkEndpoints_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.Delete", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_network_endpoint_groups_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.detach_network_endpoints", + "method": { + 
"fullName": "google.cloud.compute.v1.NetworkEndpointGroups.DetachNetworkEndpoints", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" + }, + "shortName": "DetachNetworkEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DetachNetworkEndpointsNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "network_endpoint_groups_detach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroupsDetachEndpointsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "detach_network_endpoints" + }, + "description": "Sample for DetachNetworkEndpoints", + "file": "compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_DetachNetworkEndpoints_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": 
"google.cloud.compute_v1.NetworkEndpointGroupsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.Get", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NetworkEndpointGroup", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_network_endpoint_groups_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.Insert", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": 
"NetworkEndpointGroups" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "network_endpoint_group_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_network_endpoint_groups_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.list_network_endpoints", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.ListNetworkEndpoints", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" + }, + "shortName": "ListNetworkEndpoints" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.ListNetworkEndpointsNetworkEndpointGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "network_endpoint_groups_list_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroupsListEndpointsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.ListNetworkEndpointsPager", + "shortName": "list_network_endpoints" + }, + "description": "Sample for ListNetworkEndpoints", + "file": "compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_ListNetworkEndpoints_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.List", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" + }, + "shortName": "List" 
+ }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNetworkEndpointGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_network_endpoint_groups_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + 
"name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.add_association", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AddAssociation", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "AddAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddAssociationNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, 
+ { + "name": "firewall_policy_association_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyAssociation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_association" + }, + "description": "Sample for AddAssociation", + "file": "compute_v1_generated_network_firewall_policies_add_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AddAssociation_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_add_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.add_rule", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AddRule", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "AddRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddRuleNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_rule_resource", + "type": 
"google.cloud.compute_v1.types.FirewallPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_rule" + }, + "description": "Sample for AddRule", + "file": "compute_v1_generated_network_firewall_policies_add_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AddRule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_add_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.clone_rules", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.CloneRules", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "CloneRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.CloneRulesNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "clone_rules" + }, + "description": "Sample for CloneRules", + "file": "compute_v1_generated_network_firewall_policies_clone_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_CloneRules_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_clone_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_network_firewall_policies_delete_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_association", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetAssociation", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "GetAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetAssociationNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicyAssociation", + "shortName": "get_association" + }, + "description": "Sample for GetAssociation", + "file": "compute_v1_generated_network_firewall_policies_get_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetAssociation_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": 
"FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_get_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_rule", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetRule", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "GetRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRuleNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", + "shortName": "get_rule" + }, + "description": "Sample for GetRule", + "file": "compute_v1_generated_network_firewall_policies_get_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetRule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_get_rule_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Get", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_network_firewall_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.insert", + "method": { + "fullName": 
"google.cloud.compute.v1.NetworkFirewallPolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_network_firewall_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "List" + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNetworkFirewallPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_firewall_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_network_firewall_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.patch_rule", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.PatchRule", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "PatchRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRuleNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_rule_resource", + "type": 
"google.cloud.compute_v1.types.FirewallPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_rule" + }, + "description": "Sample for PatchRule", + "file": "compute_v1_generated_network_firewall_policies_patch_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_PatchRule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_patch_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_network_firewall_policies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.remove_association", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.RemoveAssociation", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "RemoveAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveAssociationNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_association" + 
}, + "description": "Sample for RemoveAssociation", + "file": "compute_v1_generated_network_firewall_policies_remove_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_RemoveAssociation_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_remove_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.remove_rule", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.RemoveRule", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "RemoveRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveRuleNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_rule" + }, + "description": "Sample for RemoveRule", + "file": "compute_v1_generated_network_firewall_policies_remove_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "compute_v1_generated_NetworkFirewallPolicies_RemoveRule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_remove_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_SetIamPolicy_sync", + "segments": [ + { + 
"end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + 
"type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.add_peering", + "method": { + "fullName": "google.cloud.compute.v1.Networks.AddPeering", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "AddPeering" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddPeeringNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "networks_add_peering_request_resource", + "type": "google.cloud.compute_v1.types.NetworksAddPeeringRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_peering" + }, + "description": "Sample for AddPeering", + "file": "compute_v1_generated_networks_add_peering_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_AddPeering_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_add_peering_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Networks.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_networks_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": 
"google.cloud.compute_v1.NetworksClient.get_effective_firewalls", + "method": { + "fullName": "google.cloud.compute.v1.Networks.GetEffectiveFirewalls", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "GetEffectiveFirewalls" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetEffectiveFirewallsNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NetworksGetEffectiveFirewallsResponse", + "shortName": "get_effective_firewalls" + }, + "description": "Sample for GetEffectiveFirewalls", + "file": "compute_v1_generated_networks_get_effective_firewalls_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_GetEffectiveFirewalls_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_get_effective_firewalls_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Networks.Get", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "Get" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.compute_v1.types.GetNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Network", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_networks_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Networks.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network_resource", + "type": "google.cloud.compute_v1.types.Network" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_networks_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.list_peering_routes", + "method": { + "fullName": "google.cloud.compute.v1.Networks.ListPeeringRoutes", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "ListPeeringRoutes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListPeeringRoutesNetworksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.networks.pagers.ListPeeringRoutesPager", + "shortName": "list_peering_routes" + }, + "description": "Sample for ListPeeringRoutes", + "file": "compute_v1_generated_networks_list_peering_routes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_ListPeeringRoutes_sync", + 
"segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_list_peering_routes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Networks.List", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNetworksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.networks.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_networks_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_list_sync.py" 
+ }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.Networks.Patch", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "network_resource", + "type": "google.cloud.compute_v1.types.Network" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_networks_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.remove_peering", + "method": { + "fullName": "google.cloud.compute.v1.Networks.RemovePeering", + "service": { + "fullName": 
"google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "RemovePeering" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemovePeeringNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "networks_remove_peering_request_resource", + "type": "google.cloud.compute_v1.types.NetworksRemovePeeringRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_peering" + }, + "description": "Sample for RemovePeering", + "file": "compute_v1_generated_networks_remove_peering_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_RemovePeering_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_remove_peering_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.switch_to_custom_mode", + "method": { + "fullName": "google.cloud.compute.v1.Networks.SwitchToCustomMode", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "SwitchToCustomMode" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.SwitchToCustomModeNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "switch_to_custom_mode" + }, + "description": "Sample for SwitchToCustomMode", + "file": "compute_v1_generated_networks_switch_to_custom_mode_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_SwitchToCustomMode_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_switch_to_custom_mode_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" + }, + "fullName": "google.cloud.compute_v1.NetworksClient.update_peering", + "method": { + "fullName": "google.cloud.compute.v1.Networks.UpdatePeering", + "service": { + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" + }, + "shortName": "UpdatePeering" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdatePeeringNetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "networks_update_peering_request_resource", + "type": "google.cloud.compute_v1.types.NetworksUpdatePeeringRequest" + }, + { + "name": "retry", 
+ "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_peering" + }, + "description": "Sample for UpdatePeering", + "file": "compute_v1_generated_networks_update_peering_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Networks_UpdatePeering_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_networks_update_peering_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.add_nodes", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.AddNodes", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "AddNodes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddNodesNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "node_groups_add_nodes_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsAddNodesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_nodes" + }, + "description": "Sample for AddNodes", + "file": "compute_v1_generated_node_groups_add_nodes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_AddNodes_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_add_nodes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListNodeGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.node_groups.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_node_groups_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_AggregatedList_sync", + "segments": [ + { + "end": 52, + 
"start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.delete_nodes", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.DeleteNodes", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "DeleteNodes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteNodesNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "node_groups_delete_nodes_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsDeleteNodesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_nodes" + }, + "description": "Sample for DeleteNodes", + "file": "compute_v1_generated_node_groups_delete_nodes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_DeleteNodes_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + 
{ + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_delete_nodes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.Delete", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_node_groups_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_node_groups_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.Get", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + 
"shortName": "NodeGroups" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NodeGroup", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_node_groups_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.Insert", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "initial_node_count", + "type": "int" + }, + { + "name": "node_group_resource", + "type": 
"google.cloud.compute_v1.types.NodeGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_node_groups_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_Insert_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.list_nodes", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.ListNodes", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "ListNodes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNodesNodeGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.node_groups.pagers.ListNodesPager", + "shortName": 
"list_nodes" + }, + "description": "Sample for ListNodes", + "file": "compute_v1_generated_node_groups_list_nodes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_ListNodes_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_list_nodes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.List", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNodeGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.node_groups.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_node_groups_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, 
+ "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.Patch", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "node_group_resource", + "type": "google.cloud.compute_v1.types.NodeGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_node_groups_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_node_groups_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "zone_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_node_groups_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": 
"NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.set_node_template", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.SetNodeTemplate", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "SetNodeTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetNodeTemplateNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "node_groups_set_node_template_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsSetNodeTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_node_template" + }, + "description": "Sample for SetNodeTemplate", + "file": "compute_v1_generated_node_groups_set_node_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_SetNodeTemplate_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_set_node_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.simulate_maintenance_event", + 
"method": { + "fullName": "google.cloud.compute.v1.NodeGroups.SimulateMaintenanceEvent", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "SimulateMaintenanceEvent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SimulateMaintenanceEventNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "node_groups_simulate_maintenance_event_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsSimulateMaintenanceEventRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "simulate_maintenance_event" + }, + "description": "Sample for SimulateMaintenanceEvent", + "file": "compute_v1_generated_node_groups_simulate_maintenance_event_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_SimulateMaintenanceEvent_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_simulate_maintenance_event_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.test_iam_permissions", + "method": { + 
"fullName": "google.cloud.compute.v1.NodeGroups.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_node_groups_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.NodeTemplates.AggregatedList", + "service": { + "fullName": 
"google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListNodeTemplatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.node_templates.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_node_templates_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTemplates_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_templates_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.NodeTemplates.Delete", + "service": { + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteNodeTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": 
"node_template", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_node_templates_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTemplates_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_templates_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NodeTemplates.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyNodeTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + 
"shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_node_templates_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTemplates_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_templates_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.NodeTemplates.Get", + "service": { + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNodeTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "node_template", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NodeTemplate", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_node_templates_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTemplates_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" 
+ }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_templates_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.NodeTemplates.Insert", + "service": { + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertNodeTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "node_template_resource", + "type": "google.cloud.compute_v1.types.NodeTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_node_templates_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTemplates_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, 
+ { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_templates_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.NodeTemplates.List", + "service": { + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNodeTemplatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.node_templates.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_node_templates_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTemplates_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_templates_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" + }, + "fullName": 
"google.cloud.compute_v1.NodeTemplatesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.NodeTemplates.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyNodeTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_node_templates_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTemplates_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_templates_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.test_iam_permissions", + "method": { + "fullName": 
"google.cloud.compute.v1.NodeTemplates.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsNodeTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_node_templates_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTemplates_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_templates_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTypesClient", + "shortName": "NodeTypesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTypesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.NodeTypes.AggregatedList", + "service": { + "fullName": 
"google.cloud.compute.v1.NodeTypes", + "shortName": "NodeTypes" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListNodeTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "resultType": "google.cloud.compute_v1.services.node_types.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_node_types_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTypes_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_types_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTypesClient", + "shortName": "NodeTypesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTypesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.NodeTypes.Get", + "service": { + "fullName": "google.cloud.compute.v1.NodeTypes", + "shortName": "NodeTypes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNodeTypeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_type", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NodeType", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_node_types_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeTypes_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_types_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeTypesClient", + "shortName": "NodeTypesClient" + }, + "fullName": "google.cloud.compute_v1.NodeTypesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.NodeTypes.List", + "service": { + "fullName": "google.cloud.compute.v1.NodeTypes", + "shortName": "NodeTypes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNodeTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.node_types.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_node_types_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_NodeTypes_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_types_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PacketMirroringsClient", + "shortName": "PacketMirroringsClient" + }, + "fullName": "google.cloud.compute_v1.PacketMirroringsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.PacketMirrorings.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.PacketMirrorings", + "shortName": "PacketMirrorings" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListPacketMirroringsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.packet_mirrorings.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_packet_mirrorings_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PacketMirrorings_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_packet_mirrorings_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PacketMirroringsClient", + "shortName": "PacketMirroringsClient" + }, + "fullName": "google.cloud.compute_v1.PacketMirroringsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.PacketMirrorings.Delete", + "service": { + "fullName": "google.cloud.compute.v1.PacketMirrorings", + "shortName": "PacketMirrorings" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeletePacketMirroringRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "packet_mirroring", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_packet_mirrorings_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PacketMirrorings_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_packet_mirrorings_delete_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PacketMirroringsClient", + "shortName": "PacketMirroringsClient" + }, + "fullName": "google.cloud.compute_v1.PacketMirroringsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.PacketMirrorings.Get", + "service": { + "fullName": "google.cloud.compute.v1.PacketMirrorings", + "shortName": "PacketMirrorings" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetPacketMirroringRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "packet_mirroring", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.PacketMirroring", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_packet_mirrorings_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PacketMirrorings_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_packet_mirrorings_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PacketMirroringsClient", + "shortName": "PacketMirroringsClient" + }, + "fullName": "google.cloud.compute_v1.PacketMirroringsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.PacketMirrorings.Insert", + "service": { + 
"fullName": "google.cloud.compute.v1.PacketMirrorings", + "shortName": "PacketMirrorings" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertPacketMirroringRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "packet_mirroring_resource", + "type": "google.cloud.compute_v1.types.PacketMirroring" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_packet_mirrorings_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PacketMirrorings_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_packet_mirrorings_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PacketMirroringsClient", + "shortName": "PacketMirroringsClient" + }, + "fullName": "google.cloud.compute_v1.PacketMirroringsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.PacketMirrorings.List", + "service": { + "fullName": "google.cloud.compute.v1.PacketMirrorings", + "shortName": "PacketMirrorings" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListPacketMirroringsRequest" 
+ }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.packet_mirrorings.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_packet_mirrorings_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PacketMirrorings_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_packet_mirrorings_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PacketMirroringsClient", + "shortName": "PacketMirroringsClient" + }, + "fullName": "google.cloud.compute_v1.PacketMirroringsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.PacketMirrorings.Patch", + "service": { + "fullName": "google.cloud.compute.v1.PacketMirrorings", + "shortName": "PacketMirrorings" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchPacketMirroringRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "packet_mirroring", + "type": "str" + }, + { + "name": "packet_mirroring_resource", + "type": "google.cloud.compute_v1.types.PacketMirroring" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_packet_mirrorings_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PacketMirrorings_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_packet_mirrorings_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PacketMirroringsClient", + "shortName": "PacketMirroringsClient" + }, + "fullName": "google.cloud.compute_v1.PacketMirroringsClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.PacketMirrorings.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.PacketMirrorings", + "shortName": "PacketMirrorings" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsPacketMirroringRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_packet_mirrorings_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PacketMirrorings_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_packet_mirrorings_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.disable_xpn_host", + "method": { + "fullName": "google.cloud.compute.v1.Projects.DisableXpnHost", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "DisableXpnHost" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DisableXpnHostProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "disable_xpn_host" + }, + "description": "Sample for DisableXpnHost", + "file": "compute_v1_generated_projects_disable_xpn_host_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Projects_DisableXpnHost_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_disable_xpn_host_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.disable_xpn_resource", + "method": { + "fullName": "google.cloud.compute.v1.Projects.DisableXpnResource", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "DisableXpnResource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DisableXpnResourceProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "projects_disable_xpn_resource_request_resource", + "type": "google.cloud.compute_v1.types.ProjectsDisableXpnResourceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "disable_xpn_resource" + }, + "description": "Sample for DisableXpnResource", + "file": "compute_v1_generated_projects_disable_xpn_resource_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_DisableXpnResource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_disable_xpn_resource_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.enable_xpn_host", + "method": { + "fullName": "google.cloud.compute.v1.Projects.EnableXpnHost", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "EnableXpnHost" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.EnableXpnHostProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "enable_xpn_host" + }, + "description": "Sample for EnableXpnHost", + "file": "compute_v1_generated_projects_enable_xpn_host_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_EnableXpnHost_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_enable_xpn_host_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.enable_xpn_resource", + "method": { + "fullName": "google.cloud.compute.v1.Projects.EnableXpnResource", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "EnableXpnResource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.EnableXpnResourceProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "projects_enable_xpn_resource_request_resource", + "type": "google.cloud.compute_v1.types.ProjectsEnableXpnResourceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "enable_xpn_resource" + }, + "description": "Sample for EnableXpnResource", + "file": "compute_v1_generated_projects_enable_xpn_resource_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_EnableXpnResource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_enable_xpn_resource_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.get_xpn_host", + "method": { + "fullName": 
"google.cloud.compute.v1.Projects.GetXpnHost", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "GetXpnHost" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetXpnHostProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]]" + } + ], + "resultType": "google.cloud.compute_v1.types.Project", + "shortName": "get_xpn_host" + }, + "description": "Sample for GetXpnHost", + "file": "compute_v1_generated_projects_get_xpn_host_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_GetXpnHost_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_get_xpn_host_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.get_xpn_resources", + "method": { + "fullName": "google.cloud.compute.v1.Projects.GetXpnResources", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "GetXpnResources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetXpnResourcesProjectsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.projects.pagers.GetXpnResourcesPager", + "shortName": "get_xpn_resources" + }, + "description": "Sample for GetXpnResources", + "file": "compute_v1_generated_projects_get_xpn_resources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_GetXpnResources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_get_xpn_resources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Projects.Get", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Project", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_projects_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_Get_sync", + 
"segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.list_xpn_hosts", + "method": { + "fullName": "google.cloud.compute.v1.Projects.ListXpnHosts", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "ListXpnHosts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListXpnHostsProjectsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "projects_list_xpn_hosts_request_resource", + "type": "google.cloud.compute_v1.types.ProjectsListXpnHostsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.projects.pagers.ListXpnHostsPager", + "shortName": "list_xpn_hosts" + }, + "description": "Sample for ListXpnHosts", + "file": "compute_v1_generated_projects_list_xpn_hosts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_ListXpnHosts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + 
}, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_list_xpn_hosts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.move_disk", + "method": { + "fullName": "google.cloud.compute.v1.Projects.MoveDisk", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "MoveDisk" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.MoveDiskProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "disk_move_request_resource", + "type": "google.cloud.compute_v1.types.DiskMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "move_disk" + }, + "description": "Sample for MoveDisk", + "file": "compute_v1_generated_projects_move_disk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_MoveDisk_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_move_disk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + 
"shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.move_instance", + "method": { + "fullName": "google.cloud.compute.v1.Projects.MoveInstance", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "MoveInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.MoveInstanceProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "instance_move_request_resource", + "type": "google.cloud.compute_v1.types.InstanceMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "move_instance" + }, + "description": "Sample for MoveInstance", + "file": "compute_v1_generated_projects_move_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_MoveInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_move_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.set_common_instance_metadata", + "method": { + "fullName": "google.cloud.compute.v1.Projects.SetCommonInstanceMetadata", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": 
"Projects" + }, + "shortName": "SetCommonInstanceMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetCommonInstanceMetadataProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "metadata_resource", + "type": "google.cloud.compute_v1.types.Metadata" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_common_instance_metadata" + }, + "description": "Sample for SetCommonInstanceMetadata", + "file": "compute_v1_generated_projects_set_common_instance_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_SetCommonInstanceMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_set_common_instance_metadata_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.set_default_network_tier", + "method": { + "fullName": "google.cloud.compute.v1.Projects.SetDefaultNetworkTier", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "SetDefaultNetworkTier" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetDefaultNetworkTierProjectRequest" + }, + { + 
"name": "project", + "type": "str" + }, + { + "name": "projects_set_default_network_tier_request_resource", + "type": "google.cloud.compute_v1.types.ProjectsSetDefaultNetworkTierRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_default_network_tier" + }, + "description": "Sample for SetDefaultNetworkTier", + "file": "compute_v1_generated_projects_set_default_network_tier_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_SetDefaultNetworkTier_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_set_default_network_tier_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ProjectsClient", + "shortName": "ProjectsClient" + }, + "fullName": "google.cloud.compute_v1.ProjectsClient.set_usage_export_bucket", + "method": { + "fullName": "google.cloud.compute.v1.Projects.SetUsageExportBucket", + "service": { + "fullName": "google.cloud.compute.v1.Projects", + "shortName": "Projects" + }, + "shortName": "SetUsageExportBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetUsageExportBucketProjectRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "usage_export_location_resource", + "type": "google.cloud.compute_v1.types.UsageExportLocation" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_usage_export_bucket" + }, + "description": "Sample for SetUsageExportBucket", + "file": "compute_v1_generated_projects_set_usage_export_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Projects_SetUsageExportBucket_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_projects_set_usage_export_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Delete", + "service": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeletePublicAdvertisedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "public_advertised_prefix", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_public_advertised_prefixes_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_advertised_prefixes_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Get", + "service": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetPublicAdvertisedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "public_advertised_prefix", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.PublicAdvertisedPrefix", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_public_advertised_prefixes_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_advertised_prefixes_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Insert", + "service": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertPublicAdvertisedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "public_advertised_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicAdvertisedPrefix" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_public_advertised_prefixes_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_advertised_prefixes_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.List", + "service": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListPublicAdvertisedPrefixesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.public_advertised_prefixes.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_public_advertised_prefixes_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_advertised_prefixes_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient", + "shortName": "PublicAdvertisedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicAdvertisedPrefixesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes.Patch", + "service": { + "fullName": "google.cloud.compute.v1.PublicAdvertisedPrefixes", + "shortName": "PublicAdvertisedPrefixes" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchPublicAdvertisedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "public_advertised_prefix", + "type": "str" + }, + { + "name": "public_advertised_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicAdvertisedPrefix" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_public_advertised_prefixes_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicAdvertisedPrefixes_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_advertised_prefixes_patch_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListPublicDelegatedPrefixesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.public_delegated_prefixes.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": 
"google.cloud.compute_v1.PublicDelegatedPrefixesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Delete", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeletePublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_public_delegated_prefixes_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Get", + "service": { + "fullName": 
"google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.PublicDelegatedPrefix", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_public_delegated_prefixes_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Insert", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_public_delegated_prefixes_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.List", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": 
"str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.public_delegated_prefixes.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_public_delegated_prefixes_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Patch", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, + { + "name": "public_delegated_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" 
+ }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_public_delegated_prefixes_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for 
Delete", + "file": "compute_v1_generated_region_autoscalers_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Autoscaler", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_autoscalers_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": "google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_autoscalers_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionAutoscalersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_autoscalers.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_autoscalers_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": 
"google.cloud.compute_v1.RegionAutoscalersClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": "google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_autoscalers_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": 
"RegionAutoscalers" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": "google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_region_autoscalers_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Update_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionBackendServiceRequest" + }, + { + "name": "project", + 
"type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_backend_services_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get_health", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.GetHealth", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "GetHealth" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetHealthRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "resource_group_reference_resource", + "type": 
"google.cloud.compute_v1.types.ResourceGroupReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.BackendServiceGroupHealth", + "shortName": "get_health" + }, + "description": "Sample for GetHealth", + "file": "compute_v1_generated_region_backend_services_get_health_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_GetHealth_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_get_health_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.BackendService", + "shortName": "get" + }, + "description": "Sample for Get", + "file": 
"compute_v1_generated_region_backend_services_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_backend_services_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Insert_sync", 
+ "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionBackendServicesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_backend_services.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_backend_services_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_backend_services_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "compute_v1_generated_region_backend_services_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_backend_services_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_set_iam_policy_sync.py" 
+ }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_region_backend_services_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListRegionCommitmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_commitments.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_region_commitments_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.Get", + "service": { + 
"fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionCommitmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "commitment", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Commitment", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_commitments_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionCommitmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": 
"region", + "type": "str" + }, + { + "name": "commitment_resource", + "type": "google.cloud.compute_v1.types.Commitment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_commitments_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionCommitmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.region_commitments.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_commitments_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRegionCommitmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "commitment", + "type": "str" + }, + { + "name": "commitment_resource", + "type": "google.cloud.compute_v1.types.Commitment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": 
"compute_v1_generated_region_commitments_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDiskTypesClient", + "shortName": "RegionDiskTypesClient" + }, + "fullName": "google.cloud.compute_v1.RegionDiskTypesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionDiskTypes.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionDiskTypes", + "shortName": "RegionDiskTypes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionDiskTypeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk_type", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.DiskType", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_disk_types_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDiskTypes_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disk_types_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDiskTypesClient", + "shortName": "RegionDiskTypesClient" + }, + "fullName": "google.cloud.compute_v1.RegionDiskTypesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionDiskTypes.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionDiskTypes", + "shortName": "RegionDiskTypes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionDiskTypesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_disk_types.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_disk_types_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDiskTypes_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disk_types_list_sync.py" + }, + { + "canonical": true, 
+ "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.add_resource_policies", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.AddResourcePolicies", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "AddResourcePolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddResourcePoliciesRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "region_disks_add_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.RegionDisksAddResourcePoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_resource_policies" + }, + "description": "Sample for AddResourcePolicies", + "file": "compute_v1_generated_region_disks_add_resource_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_AddResourcePolicies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_add_resource_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.bulk_insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.BulkInsert", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "BulkInsert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.BulkInsertRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "bulk_insert_disk_resource_resource", + "type": "google.cloud.compute_v1.types.BulkInsertDiskResource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "bulk_insert" + }, + "description": "Sample for BulkInsert", + "file": "compute_v1_generated_region_disks_bulk_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_BulkInsert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_bulk_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.create_snapshot", + "method": { + "fullName": 
"google.cloud.compute.v1.RegionDisks.CreateSnapshot", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.CreateSnapshotRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "snapshot_resource", + "type": "google.cloud.compute_v1.types.Snapshot" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "compute_v1_generated_region_disks_create_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_CreateSnapshot_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_create_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "Delete" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_disks_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, 
+ { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_disks_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Disk", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_disks_get_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk_resource", + "type": "google.cloud.compute_v1.types.Disk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_disks_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, 
+ "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionDisksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_disks.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_disks_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": 
"RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.remove_resource_policies", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.RemoveResourcePolicies", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "RemoveResourcePolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveResourcePoliciesRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "region_disks_remove_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.RegionDisksRemoveResourcePoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_resource_policies" + }, + "description": "Sample for RemoveResourcePolicies", + "file": "compute_v1_generated_region_disks_remove_resource_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_RemoveResourcePolicies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_remove_resource_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + 
"fullName": "google.cloud.compute_v1.RegionDisksClient.resize", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.Resize", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "Resize" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ResizeRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "region_disks_resize_request_resource", + "type": "google.cloud.compute_v1.types.RegionDisksResizeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "resize" + }, + "description": "Sample for Resize", + "file": "compute_v1_generated_region_disks_resize_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_Resize_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_resize_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.SetIamPolicy", + "service": { + "fullName": 
"google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_disks_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.SetLabelsRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_region_disks_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.start_async_replication", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.StartAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "StartAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StartAsyncReplicationRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + 
"type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "region_disks_start_async_replication_request_resource", + "type": "google.cloud.compute_v1.types.RegionDisksStartAsyncReplicationRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "start_async_replication" + }, + "description": "Sample for StartAsyncReplication", + "file": "compute_v1_generated_region_disks_start_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_StartAsyncReplication_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_start_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.stop_async_replication", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.StopAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "StopAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StopAsyncReplicationRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "stop_async_replication" + }, + "description": "Sample for StopAsyncReplication", + "file": "compute_v1_generated_region_disks_stop_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_StopAsyncReplication_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_stop_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.stop_group_async_replication", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.StopGroupAsyncReplication", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "StopGroupAsyncReplication" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.StopGroupAsyncReplicationRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disks_stop_group_async_replication_resource_resource", + "type": "google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "stop_group_async_replication" + }, + "description": "Sample for StopGroupAsyncReplication", + "file": "compute_v1_generated_region_disks_stop_group_async_replication_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_StopGroupAsyncReplication_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_stop_group_async_replication_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_disks_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1.RegionDisksClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionDisks.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRegionDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "disk_resource", + "type": "google.cloud.compute_v1.types.Disk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, 
+ "description": "Sample for Update", + "file": "compute_v1_generated_region_disks_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionDisks_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_disks_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionHealthCheckServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_check_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_health_check_services_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_RegionHealthCheckServices_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_check_services_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionHealthCheckServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_check_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.HealthCheckService", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_health_check_services_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_check_services_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionHealthCheckServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_check_service_resource", + "type": "google.cloud.compute_v1.types.HealthCheckService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_health_check_services_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_check_services_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionHealthCheckServicesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_health_check_services.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_health_check_services_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_check_services_list_sync.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionHealthCheckServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_check_service", + "type": "str" + }, + { + "name": "health_check_service_resource", + "type": "google.cloud.compute_v1.types.HealthCheckService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_health_check_services_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_check_services_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_check", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_health_checks_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthChecks_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_checks_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Get", + "service": { + 
"fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_check", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.HealthCheck", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_health_checks_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthChecks_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_checks_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, 
+ { + "name": "region", + "type": "str" + }, + { + "name": "health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_health_checks_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthChecks_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_checks_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionHealthChecksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" 
+ } + ], + "resultType": "google.cloud.compute_v1.services.region_health_checks.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_health_checks_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthChecks_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_checks_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_check", + "type": "str" + }, + { + "name": "health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": 
"compute_v1_generated_region_health_checks_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthChecks_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_checks_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRegionHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_check", + "type": "str" + }, + { + "name": "health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_region_health_checks_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_RegionHealthChecks_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_health_checks_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.abandon_instances", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.AbandonInstances", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "AbandonInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AbandonInstancesRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_managers_abandon_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersAbandonInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "abandon_instances" + }, + "description": "Sample for AbandonInstances", + "file": 
"compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_AbandonInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.apply_updates_to_instances", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ApplyUpdatesToInstances", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "ApplyUpdatesToInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_managers_apply_updates_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersApplyUpdatesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "apply_updates_to_instances" + }, + "description": "Sample for ApplyUpdatesToInstances", + "file": "compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ApplyUpdatesToInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.create_instances", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.CreateInstances", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "CreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.CreateInstancesRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_managers_create_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersCreateInstancesRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "create_instances" + }, + "description": "Sample for CreateInstances", + "file": "compute_v1_generated_region_instance_group_managers_create_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_CreateInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_create_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete_instances", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.DeleteInstances", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "DeleteInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInstancesRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_managers_delete_instances_request_resource", + "type": 
"google.cloud.compute_v1.types.RegionInstanceGroupManagersDeleteInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_instances" + }, + "description": "Sample for DeleteInstances", + "file": "compute_v1_generated_region_instance_group_managers_delete_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_DeleteInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_delete_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete_per_instance_configs", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.DeletePerInstanceConfigs", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "DeletePerInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + 
"name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_manager_delete_instance_config_req_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerDeleteInstanceConfigReq" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_per_instance_configs" + }, + "description": "Sample for DeletePerInstanceConfigs", + "file": "compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_DeletePerInstanceConfigs_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_instance_group_managers_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + 
"name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InstanceGroupManager", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_instance_group_managers_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManager" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, 
+ { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_instance_group_managers_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_errors", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListErrors", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "ListErrors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListErrorsRegionInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListErrorsPager", + "shortName": "list_errors" + }, + "description": "Sample for ListErrors", + "file": "compute_v1_generated_region_instance_group_managers_list_errors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ListErrors_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_list_errors_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_managed_instances", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListManagedInstances", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "ListManagedInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListManagedInstancesRegionInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListManagedInstancesPager", + "shortName": "list_managed_instances" + }, + "description": "Sample for ListManagedInstances", + "file": "compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ListManagedInstances_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_per_instance_configs", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListPerInstanceConfigs", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "ListPerInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListPerInstanceConfigsRegionInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListPerInstanceConfigsPager", + "shortName": "list_per_instance_configs" + }, + "description": "Sample for ListPerInstanceConfigs", + "file": "compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ListPerInstanceConfigs_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_instance_group_managers_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.patch_per_instance_configs", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.PatchPerInstanceConfigs", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "PatchPerInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_manager_patch_instance_config_req_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerPatchInstanceConfigReq" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_per_instance_configs" + }, + "description": "Sample for PatchPerInstanceConfigs", + "file": "compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_PatchPerInstanceConfigs_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_manager_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManager" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_instance_group_managers_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.recreate_instances", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.RecreateInstances", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "RecreateInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RecreateInstancesRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_managers_recreate_request_resource", + "type": 
"google.cloud.compute_v1.types.RegionInstanceGroupManagersRecreateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "recreate_instances" + }, + "description": "Sample for RecreateInstances", + "file": "compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_RecreateInstances_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.resize", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Resize", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "Resize" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ResizeRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "size", + 
"type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "resize" + }, + "description": "Sample for Resize", + "file": "compute_v1_generated_region_instance_group_managers_resize_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Resize_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_resize_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.set_instance_template", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.SetInstanceTemplate", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "SetInstanceTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetInstanceTemplateRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_managers_set_template_request_resource", + "type": 
"google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_instance_template" + }, + "description": "Sample for SetInstanceTemplate", + "file": "compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_SetInstanceTemplate_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.set_target_pools", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.SetTargetPools", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "SetTargetPools" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetTargetPoolsRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": 
"instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_managers_set_target_pools_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTargetPoolsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_target_pools" + }, + "description": "Sample for SetTargetPools", + "file": "compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_SetTargetPools_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.update_per_instance_configs", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.UpdatePerInstanceConfigs", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" + }, + "shortName": "UpdatePerInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_manager_update_instance_config_req_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerUpdateInstanceConfigReq" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_per_instance_configs" + }, + "description": "Sample for UpdatePerInstanceConfigs", + "file": "compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_UpdatePerInstanceConfigs_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" 
+ }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionInstanceGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InstanceGroup", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_instance_groups_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroups_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_groups_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.list_instances", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.ListInstances", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInstancesRegionInstanceGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": 
"str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "region_instance_groups_list_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_instance_groups.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "compute_v1_generated_region_instance_groups_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroups_ListInstances_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_groups_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionInstanceGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_instance_groups.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_instance_groups_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroups_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_groups_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.set_named_ports", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.SetNamedPorts", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" + }, + "shortName": "SetNamedPorts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetNamedPortsRegionInstanceGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "region_instance_groups_set_named_ports_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupsSetNamedPortsRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_named_ports" + }, + "description": "Sample for SetNamedPorts", + "file": "compute_v1_generated_region_instance_groups_set_named_ports_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceGroups_SetNamedPorts_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_groups_set_named_ports_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", + "shortName": "RegionInstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", + "shortName": "RegionInstanceTemplates" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_template", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_instance_templates_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceTemplates_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_templates_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", + "shortName": "RegionInstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", + "shortName": "RegionInstanceTemplates" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_template", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InstanceTemplate", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_instance_templates_get_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceTemplates_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_templates_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", + "shortName": "RegionInstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", + "shortName": "RegionInstanceTemplates" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_template_resource", + "type": "google.cloud.compute_v1.types.InstanceTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_instance_templates_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceTemplates_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, 
+ { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_templates_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", + "shortName": "RegionInstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", + "shortName": "RegionInstanceTemplates" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionInstanceTemplatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_instance_templates.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_instance_templates_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstanceTemplates_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instance_templates_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionInstancesClient", + "shortName": "RegionInstancesClient" + }, + "fullName": "google.cloud.compute_v1.RegionInstancesClient.bulk_insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionInstances.BulkInsert", + "service": { + "fullName": "google.cloud.compute.v1.RegionInstances", + "shortName": "RegionInstances" + }, + "shortName": "BulkInsert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.BulkInsertRegionInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "bulk_insert_instance_resource_resource", + "type": "google.cloud.compute_v1.types.BulkInsertInstanceResource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "bulk_insert" + }, + "description": "Sample for BulkInsert", + "file": "compute_v1_generated_region_instances_bulk_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionInstances_BulkInsert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_instances_bulk_insert_sync.py" + }, + 
{ + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": 
"RegionNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NetworkEndpointGroup", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_network_endpoint_groups_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_endpoint_groups_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.insert", + "method": { + "fullName": 
"google.cloud.compute.v1.RegionNetworkEndpointGroups.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "network_endpoint_group_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.List", + "service": { + "fullName": 
"google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionNetworkEndpointGroupsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_network_endpoint_groups.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_network_endpoint_groups_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_endpoint_groups_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.add_association", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.AddAssociation", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "AddAssociation" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.compute_v1.types.AddAssociationRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_association_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyAssociation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_association" + }, + "description": "Sample for AddAssociation", + "file": "compute_v1_generated_region_network_firewall_policies_add_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_AddAssociation_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_add_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.add_rule", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.AddRule", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "AddRule" + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddRuleRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_rule" + }, + "description": "Sample for AddRule", + "file": "compute_v1_generated_region_network_firewall_policies_add_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_AddRule_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_add_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.clone_rules", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.CloneRules", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "CloneRules" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.CloneRulesRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "clone_rules" + }, + "description": "Sample for CloneRules", + "file": "compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_CloneRules_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.DeleteRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_network_firewall_policies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_association", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetAssociation", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "GetAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetAssociationRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": 
"str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicyAssociation", + "shortName": "get_association" + }, + "description": "Sample for GetAssociation", + "file": "compute_v1_generated_region_network_firewall_policies_get_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetAssociation_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_get_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_effective_firewalls", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetEffectiveFirewalls", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "GetEffectiveFirewalls" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + 
"type": "str" + }, + { + "name": "network", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse", + "shortName": "get_effective_firewalls" + }, + "description": "Sample for GetEffectiveFirewalls", + "file": "compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetEffectiveFirewalls_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" 
+ }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_rule", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetRule", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "GetRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRuleRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", + "shortName": "get_rule" + }, + "description": "Sample for GetRule", + "file": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetRule_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.types.FirewallPolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_network_firewall_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", 
+ "file": "compute_v1_generated_region_network_firewall_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionNetworkFirewallPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_network_firewall_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_network_firewall_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_RegionNetworkFirewallPolicies_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch_rule", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.PatchRule", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "PatchRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRuleRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_rule" + }, + "description": "Sample for PatchRule", + "file": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_RegionNetworkFirewallPolicies_PatchRule_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_network_firewall_policies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_RegionNetworkFirewallPolicies_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_association", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveAssociation", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "RemoveAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveAssociationRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_association" + }, + "description": "Sample for RemoveAssociation", + "file": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_RegionNetworkFirewallPolicies_RemoveAssociation_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_rule", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveRule", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "RemoveRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveRuleRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_rule" + }, + "description": "Sample for RemoveRule", + "file": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveRule_sync", + "segments": [ + { + 
"end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_SetIamPolicy_sync", + "segments": 
[ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionNetworkFirewallPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_RegionNetworkFirewallPolicies_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "notification_endpoint", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_notification_endpoints_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, 
+ { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_notification_endpoints_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionNotificationEndpointRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "notification_endpoint", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.NotificationEndpoint", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_notification_endpoints_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_notification_endpoints_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "notification_endpoint_resource", + "type": "google.cloud.compute_v1.types.NotificationEndpoint" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_notification_endpoints_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_notification_endpoints_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" + }, + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_notification_endpoints.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_notification_endpoints_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_notification_endpoints_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" + }, + "fullName": "google.cloud.compute_v1.RegionOperationsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionOperations.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.DeleteRegionOperationResponse", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_operations_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionOperations_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_operations_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" + }, + "fullName": "google.cloud.compute_v1.RegionOperationsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionOperations.Get", + "service": { + "fullName": 
"google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_operations_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionOperations_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_operations_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" + }, + "fullName": "google.cloud.compute_v1.RegionOperationsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionOperations.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionOperationsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_operations.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_operations_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionOperations_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_operations_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" + }, + "fullName": "google.cloud.compute_v1.RegionOperationsClient.wait", + "method": { + "fullName": "google.cloud.compute.v1.RegionOperations.Wait", + "service": { + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" + }, + "shortName": "Wait" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.WaitRegionOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "wait" + }, + "description": 
"Sample for Wait", + "file": "compute_v1_generated_region_operations_wait_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionOperations_Wait_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_operations_wait_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_security_policies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Delete_sync", + "segments": [ + { + 
"end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_security_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SecurityPolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_security_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_security_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "security_policy_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_security_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_region_security_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionSecurityPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_security_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_security_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_security_policies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" + }, + "fullName": 
"google.cloud.compute_v1.RegionSecurityPoliciesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "security_policy_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_security_policies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_security_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslCertificatesClient", + "shortName": "RegionSslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslCertificatesClient.delete", + "method": { + "fullName": 
"google.cloud.compute.v1.RegionSslCertificates.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslCertificates", + "shortName": "RegionSslCertificates" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "ssl_certificate", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_ssl_certificates_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslCertificates_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_certificates_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslCertificatesClient", + "shortName": "RegionSslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslCertificatesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslCertificates.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslCertificates", + "shortName": "RegionSslCertificates" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.compute_v1.types.GetRegionSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "ssl_certificate", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SslCertificate", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_ssl_certificates_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslCertificates_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_certificates_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslCertificatesClient", + "shortName": "RegionSslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslCertificatesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslCertificates.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslCertificates", + "shortName": "RegionSslCertificates" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "ssl_certificate_resource", + "type": 
"google.cloud.compute_v1.types.SslCertificate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_ssl_certificates_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslCertificates_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_certificates_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslCertificatesClient", + "shortName": "RegionSslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslCertificatesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslCertificates.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslCertificates", + "shortName": "RegionSslCertificates" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionSslCertificatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.region_ssl_certificates.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_ssl_certificates_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslCertificates_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_certificates_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient", + "shortName": "RegionSslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies", + "shortName": "RegionSslPolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionSslPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "ssl_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_ssl_policies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "compute_v1_generated_RegionSslPolicies_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient", + "shortName": "RegionSslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies", + "shortName": "RegionSslPolicies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionSslPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "ssl_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SslPolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_ssl_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslPolicies_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient", + "shortName": "RegionSslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies", + "shortName": "RegionSslPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionSslPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "ssl_policy_resource", + "type": "google.cloud.compute_v1.types.SslPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_ssl_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslPolicies_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_region_ssl_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient", + "shortName": "RegionSslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient.list_available_features", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies.ListAvailableFeatures", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies", + "shortName": "RegionSslPolicies" + }, + "shortName": "ListAvailableFeatures" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListAvailableFeaturesRegionSslPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SslPoliciesListAvailableFeaturesResponse", + "shortName": "list_available_features" + }, + "description": "Sample for ListAvailableFeatures", + "file": "compute_v1_generated_region_ssl_policies_list_available_features_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslPolicies_ListAvailableFeatures_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_policies_list_available_features_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.RegionSslPoliciesClient", + "shortName": "RegionSslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies", + "shortName": "RegionSslPolicies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionSslPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_ssl_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_ssl_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslPolicies_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_policies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient", + "shortName": "RegionSslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionSslPoliciesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionSslPolicies", + 
"shortName": "RegionSslPolicies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionSslPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "ssl_policy", + "type": "str" + }, + { + "name": "ssl_policy_resource", + "type": "google.cloud.compute_v1.types.SslPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_ssl_policies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSslPolicies_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_ssl_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient", + "shortName": "RegionTargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies", + "shortName": "RegionTargetHttpProxies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.DeleteRegionTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_http_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_target_http_proxies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpProxies_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_http_proxies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient", + "shortName": "RegionTargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies", + "shortName": "RegionTargetHttpProxies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_http_proxy", + "type": "str" + }, 
+ { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetHttpProxy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_target_http_proxies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpProxies_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_http_proxies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient", + "shortName": "RegionTargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies", + "shortName": "RegionTargetHttpProxies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_http_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetHttpProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" 
+ } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_target_http_proxies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpProxies_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_http_proxies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient", + "shortName": "RegionTargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies", + "shortName": "RegionTargetHttpProxies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionTargetHttpProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_target_http_proxies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_target_http_proxies_list_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpProxies_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_http_proxies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient", + "shortName": "RegionTargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpProxiesClient.set_url_map", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies.SetUrlMap", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpProxies", + "shortName": "RegionTargetHttpProxies" + }, + "shortName": "SetUrlMap" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_http_proxy", + "type": "str" + }, + { + "name": "url_map_reference_resource", + "type": "google.cloud.compute_v1.types.UrlMapReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_url_map" + }, + "description": "Sample for SetUrlMap", + "file": "compute_v1_generated_region_target_http_proxies_set_url_map_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_RegionTargetHttpProxies_SetUrlMap_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_http_proxies_set_url_map_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient", + "shortName": "RegionTargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies", + "shortName": "RegionTargetHttpsProxies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_target_https_proxies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpsProxies_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { 
+ "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_https_proxies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient", + "shortName": "RegionTargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies", + "shortName": "RegionTargetHttpsProxies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetHttpsProxy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_target_https_proxies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpsProxies_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + 
"start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_https_proxies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient", + "shortName": "RegionTargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies", + "shortName": "RegionTargetHttpsProxies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_https_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetHttpsProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_target_https_proxies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpsProxies_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_https_proxies_insert_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient", + "shortName": "RegionTargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies", + "shortName": "RegionTargetHttpsProxies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionTargetHttpsProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_target_https_proxies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_target_https_proxies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpsProxies_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_https_proxies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient", + "shortName": "RegionTargetHttpsProxiesClient" + }, + "fullName": 
"google.cloud.compute_v1.RegionTargetHttpsProxiesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies", + "shortName": "RegionTargetHttpsProxies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "target_https_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetHttpsProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_target_https_proxies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpsProxies_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_https_proxies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient", + "shortName": "RegionTargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient.set_ssl_certificates", + "method": { + 
"fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies.SetSslCertificates", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies", + "shortName": "RegionTargetHttpsProxies" + }, + "shortName": "SetSslCertificates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSslCertificatesRegionTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "region_target_https_proxies_set_ssl_certificates_request_resource", + "type": "google.cloud.compute_v1.types.RegionTargetHttpsProxiesSetSslCertificatesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_ssl_certificates" + }, + "description": "Sample for SetSslCertificates", + "file": "compute_v1_generated_region_target_https_proxies_set_ssl_certificates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpsProxies_SetSslCertificates_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_https_proxies_set_ssl_certificates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient", + "shortName": "RegionTargetHttpsProxiesClient" + }, + 
"fullName": "google.cloud.compute_v1.RegionTargetHttpsProxiesClient.set_url_map", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies.SetUrlMap", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetHttpsProxies", + "shortName": "RegionTargetHttpsProxies" + }, + "shortName": "SetUrlMap" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "url_map_reference_resource", + "type": "google.cloud.compute_v1.types.UrlMapReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_url_map" + }, + "description": "Sample for SetUrlMap", + "file": "compute_v1_generated_region_target_https_proxies_set_url_map_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetHttpsProxies_SetUrlMap_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_https_proxies_set_url_map_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetTcpProxiesClient", + "shortName": "RegionTargetTcpProxiesClient" + }, + "fullName": 
"google.cloud.compute_v1.RegionTargetTcpProxiesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetTcpProxies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetTcpProxies", + "shortName": "RegionTargetTcpProxies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionTargetTcpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_tcp_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_target_tcp_proxies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetTcpProxies_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_tcp_proxies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetTcpProxiesClient", + "shortName": "RegionTargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetTcpProxiesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetTcpProxies.Get", + "service": { + "fullName": 
"google.cloud.compute.v1.RegionTargetTcpProxies", + "shortName": "RegionTargetTcpProxies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionTargetTcpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_tcp_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetTcpProxy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_target_tcp_proxies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetTcpProxies_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_tcp_proxies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetTcpProxiesClient", + "shortName": "RegionTargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetTcpProxiesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetTcpProxies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetTcpProxies", + "shortName": "RegionTargetTcpProxies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionTargetTcpProxyRequest" + }, 
+ { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_tcp_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetTcpProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_target_tcp_proxies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetTcpProxies_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_tcp_proxies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionTargetTcpProxiesClient", + "shortName": "RegionTargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.RegionTargetTcpProxiesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionTargetTcpProxies.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionTargetTcpProxies", + "shortName": "RegionTargetTcpProxies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionTargetTcpProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_target_tcp_proxies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_target_tcp_proxies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionTargetTcpProxies_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_target_tcp_proxies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient", + "shortName": "RegionUrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps", + "shortName": "RegionUrlMaps" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": 
"compute_v1_generated_region_url_maps_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionUrlMaps_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_url_maps_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient", + "shortName": "RegionUrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps", + "shortName": "RegionUrlMaps" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.UrlMap", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_url_maps_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionUrlMaps_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_url_maps_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient", + "shortName": "RegionUrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps", + "shortName": "RegionUrlMaps" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "url_map_resource", + "type": "google.cloud.compute_v1.types.UrlMap" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_url_maps_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionUrlMaps_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_region_url_maps_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient", + "shortName": "RegionUrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps", + "shortName": "RegionUrlMaps" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionUrlMapsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_url_maps.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_url_maps_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionUrlMaps_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_url_maps_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient", + "shortName": "RegionUrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps.Patch", + 
"service": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps", + "shortName": "RegionUrlMaps" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "url_map_resource", + "type": "google.cloud.compute_v1.types.UrlMap" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_url_maps_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionUrlMaps_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_url_maps_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient", + "shortName": "RegionUrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps", + "shortName": "RegionUrlMaps" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.UpdateRegionUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "url_map_resource", + "type": "google.cloud.compute_v1.types.UrlMap" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_region_url_maps_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionUrlMaps_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_url_maps_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient", + "shortName": "RegionUrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.RegionUrlMapsClient.validate", + "method": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps.Validate", + "service": { + "fullName": "google.cloud.compute.v1.RegionUrlMaps", + "shortName": "RegionUrlMaps" + }, + "shortName": "Validate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ValidateRegionUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": 
"region_url_maps_validate_request_resource", + "type": "google.cloud.compute_v1.types.RegionUrlMapsValidateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.UrlMapsValidateResponse", + "shortName": "validate" + }, + "description": "Sample for Validate", + "file": "compute_v1_generated_region_url_maps_validate_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionUrlMaps_Validate_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_url_maps_validate_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionsClient", + "shortName": "RegionsClient" + }, + "fullName": "google.cloud.compute_v1.RegionsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Regions.Get", + "service": { + "fullName": "google.cloud.compute.v1.Regions", + "shortName": "Regions" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Region", + "shortName": "get" + }, + "description": 
"Sample for Get", + "file": "compute_v1_generated_regions_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Regions_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_regions_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionsClient", + "shortName": "RegionsClient" + }, + "fullName": "google.cloud.compute_v1.RegionsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Regions.List", + "service": { + "fullName": "google.cloud.compute.v1.Regions", + "shortName": "Regions" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.regions.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_regions_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Regions_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_regions_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": "google.cloud.compute_v1.ReservationsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListReservationsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.reservations.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_reservations_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + 
"fullName": "google.cloud.compute_v1.ReservationsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteReservationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "reservation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_reservations_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": "google.cloud.compute_v1.ReservationsClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.compute_v1.types.GetIamPolicyReservationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_reservations_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": "google.cloud.compute_v1.ReservationsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.Get", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetReservationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "reservation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Reservation", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_reservations_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": "google.cloud.compute_v1.ReservationsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertReservationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "reservation_resource", + "type": "google.cloud.compute_v1.types.Reservation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_reservations_insert_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": "google.cloud.compute_v1.ReservationsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.List", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListReservationsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.reservations.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_reservations_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 
49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": "google.cloud.compute_v1.ReservationsClient.resize", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.Resize", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "Resize" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ResizeReservationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "reservation", + "type": "str" + }, + { + "name": "reservations_resize_request_resource", + "type": "google.cloud.compute_v1.types.ReservationsResizeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "resize" + }, + "description": "Sample for Resize", + "file": "compute_v1_generated_reservations_resize_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_Resize_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_resize_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": "google.cloud.compute_v1.ReservationsClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyReservationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "zone_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_reservations_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": 
"google.cloud.compute_v1.ReservationsClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Reservations.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsReservationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_reservations_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ReservationsClient", + "shortName": "ReservationsClient" + }, + "fullName": "google.cloud.compute_v1.ReservationsClient.update", + "method": { + "fullName": 
"google.cloud.compute.v1.Reservations.Update", + "service": { + "fullName": "google.cloud.compute.v1.Reservations", + "shortName": "Reservations" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateReservationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "reservation", + "type": "str" + }, + { + "name": "reservation_resource", + "type": "google.cloud.compute_v1.types.Reservation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_reservations_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Reservations_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_reservations_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": "AggregatedList" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListResourcePoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.resource_policies.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_resource_policies_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteResourcePolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource_policy", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_resource_policies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyResourcePolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": 
"Sample for GetIamPolicy", + "file": "compute_v1_generated_resource_policies_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.Get", + "service": { + "fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetResourcePolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.ResourcePolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_resource_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + 
"end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertResourcePolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource_policy_resource", + "type": "google.cloud.compute_v1.types.ResourcePolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_resource_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListResourcePoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.resource_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_resource_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + 
"fullName": "google.cloud.compute_v1.ResourcePoliciesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchResourcePolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource_policy", + "type": "str" + }, + { + "name": "resource_policy_resource", + "type": "google.cloud.compute_v1.types.ResourcePolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_resource_policies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.SetIamPolicy", + "service": { + 
"fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyResourcePolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_resource_policies_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient", + "shortName": "ResourcePoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ResourcePoliciesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.ResourcePolicies.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.ResourcePolicies", + "shortName": "ResourcePolicies" + }, + "shortName": 
"TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsResourcePolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_resource_policies_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ResourcePolicies_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_resource_policies_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.Routers.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.AggregatedListRoutersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.routers.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_routers_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Routers.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRouterRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_routers_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.get_nat_mapping_info", + "method": { + "fullName": "google.cloud.compute.v1.Routers.GetNatMappingInfo", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "GetNatMappingInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetNatMappingInfoRoutersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.routers.pagers.GetNatMappingInfoPager", + "shortName": "get_nat_mapping_info" + }, + "description": "Sample for GetNatMappingInfo", + "file": "compute_v1_generated_routers_get_nat_mapping_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Routers_GetNatMappingInfo_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_get_nat_mapping_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.get_router_status", + "method": { + "fullName": "google.cloud.compute.v1.Routers.GetRouterStatus", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "GetRouterStatus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRouterStatusRouterRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.RouterStatusResponse", + "shortName": "get_router_status" + }, + "description": "Sample for GetRouterStatus", + "file": "compute_v1_generated_routers_get_router_status_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_GetRouterStatus_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_get_router_status_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Routers.Get", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRouterRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Router", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_routers_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": 
"google.cloud.compute_v1.RoutersClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Routers.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRouterRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router_resource", + "type": "google.cloud.compute_v1.types.Router" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_routers_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Routers.List", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRoutersRequest" + 
}, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.routers.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_routers_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.Routers.Patch", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRouterRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router", + "type": "str" + }, + { + "name": "router_resource", + "type": "google.cloud.compute_v1.types.Router" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_routers_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.preview", + "method": { + "fullName": "google.cloud.compute.v1.Routers.Preview", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "Preview" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PreviewRouterRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router", + "type": "str" + }, + { + "name": "router_resource", + "type": "google.cloud.compute_v1.types.Router" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.RoutersPreviewResponse", + "shortName": "preview" + }, + "description": "Sample for Preview", + "file": "compute_v1_generated_routers_preview_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_Preview_sync", 
+ "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_preview_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutersClient", + "shortName": "RoutersClient" + }, + "fullName": "google.cloud.compute_v1.RoutersClient.update", + "method": { + "fullName": "google.cloud.compute.v1.Routers.Update", + "service": { + "fullName": "google.cloud.compute.v1.Routers", + "shortName": "Routers" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRouterRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "router", + "type": "str" + }, + { + "name": "router_resource", + "type": "google.cloud.compute_v1.types.Router" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_routers_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routers_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 
48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routers_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutesClient", + "shortName": "RoutesClient" + }, + "fullName": "google.cloud.compute_v1.RoutesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Routes.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Routes", + "shortName": "Routes" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRouteRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "route", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_routes_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routes_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routes_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutesClient", + "shortName": "RoutesClient" + }, + "fullName": "google.cloud.compute_v1.RoutesClient.get", + "method": { + "fullName": 
"google.cloud.compute.v1.Routes.Get", + "service": { + "fullName": "google.cloud.compute.v1.Routes", + "shortName": "Routes" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRouteRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "route", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Route", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_routes_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routes_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routes_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutesClient", + "shortName": "RoutesClient" + }, + "fullName": "google.cloud.compute_v1.RoutesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Routes.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Routes", + "shortName": "Routes" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRouteRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "route_resource", + "type": "google.cloud.compute_v1.types.Route" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_routes_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routes_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routes_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RoutesClient", + "shortName": "RoutesClient" + }, + "fullName": "google.cloud.compute_v1.RoutesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Routes.List", + "service": { + "fullName": "google.cloud.compute.v1.Routes", + "shortName": "Routes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRoutesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.routes.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_routes_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Routes_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { 
+ "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_routes_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.add_rule", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.AddRule", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "AddRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddRuleSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "security_policy_rule_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_rule" + }, + "description": "Sample for AddRule", + "file": "compute_v1_generated_security_policies_add_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_AddRule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + 
"start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_add_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListSecurityPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.security_policies.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_security_policies_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_security_policies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.get_rule", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.GetRule", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + 
"shortName": "SecurityPolicies" + }, + "shortName": "GetRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRuleSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SecurityPolicyRule", + "shortName": "get_rule" + }, + "description": "Sample for GetRule", + "file": "compute_v1_generated_security_policies_get_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_GetRule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_get_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.Get", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SecurityPolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_security_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "security_policy_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + 
"file": "compute_v1_generated_security_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.list_preconfigured_expression_sets", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.ListPreconfiguredExpressionSets", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "ListPreconfiguredExpressionSets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListPreconfiguredExpressionSetsSecurityPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SecurityPoliciesListPreconfiguredExpressionSetsResponse", + "shortName": "list_preconfigured_expression_sets" + }, + "description": "Sample for ListPreconfiguredExpressionSets", + "file": "compute_v1_generated_security_policies_list_preconfigured_expression_sets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_SecurityPolicies_ListPreconfiguredExpressionSets_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_list_preconfigured_expression_sets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListSecurityPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.security_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_security_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.patch_rule", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.PatchRule", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "PatchRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRuleSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "security_policy_rule_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_rule" + }, + "description": "Sample for PatchRule", + "file": "compute_v1_generated_security_policies_patch_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_PatchRule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_security_policies_patch_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "security_policy_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_security_policies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, 
+ "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.remove_rule", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.RemoveRule", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": "RemoveRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveRuleSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_rule" + }, + "description": "Sample for RemoveRule", + "file": "compute_v1_generated_security_policies_remove_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_RemoveRule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_remove_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient", + "shortName": "SecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SecurityPoliciesClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.SecurityPolicies.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.SecurityPolicies", + "shortName": "SecurityPolicies" + }, + "shortName": 
"SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsSecurityPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_security_policies_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SecurityPolicies_SetLabels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_security_policies_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListServiceAttachmentsRequest" + }, + { + 
"name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.service_attachments.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_service_attachments_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.Delete", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "service_attachment", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_service_attachments_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": 
"compute_v1_generated_service_attachments_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.Get", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "service_attachment", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.ServiceAttachment", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_service_attachments_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 
53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.Insert", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "service_attachment_resource", + "type": "google.cloud.compute_v1.types.ServiceAttachment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_service_attachments_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + 
"start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.List", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListServiceAttachmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.service_attachments.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_service_attachments_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.Patch", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "service_attachment", + "type": "str" + }, + { + "name": "service_attachment_resource", + "type": "google.cloud.compute_v1.types.ServiceAttachment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_service_attachments_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": 
"google.cloud.compute_v1.ServiceAttachmentsClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.ServiceAttachments.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_service_attachments_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient", + "shortName": "ServiceAttachmentsClient" + }, + "fullName": "google.cloud.compute_v1.ServiceAttachmentsClient.test_iam_permissions", + "method": { + "fullName": 
"google.cloud.compute.v1.ServiceAttachments.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.ServiceAttachments", + "shortName": "ServiceAttachments" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsServiceAttachmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_service_attachments_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ServiceAttachments_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_service_attachments_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.Delete", + "service": { + "fullName": 
"google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_snapshots_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicySnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_snapshots_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.Get", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Snapshot", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_snapshots_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_Snapshots_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot_resource", + "type": "google.cloud.compute_v1.types.Snapshot" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_snapshots_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.List", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.snapshots.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_snapshots_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.SetIamPolicy", 
+ "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicySnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_snapshots_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsSnapshotRequest" + }, 
+ { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_snapshots_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_SetLabels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_snapshots_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.SslCertificatesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListSslCertificatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.ssl_certificates.pagers.AggregatedListPager", + "shortName": 
"aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_ssl_certificates_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_certificates_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.SslCertificatesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.Delete", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_certificate", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_ssl_certificates_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + 
"type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_certificates_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.SslCertificatesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.Get", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_certificate", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SslCertificate", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_ssl_certificates_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_ssl_certificates_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.SslCertificatesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.Insert", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_certificate_resource", + "type": "google.cloud.compute_v1.types.SslCertificate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_ssl_certificates_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_certificates_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": 
"google.cloud.compute_v1.SslCertificatesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.List", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListSslCertificatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.ssl_certificates.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_ssl_certificates_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_certificates_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SslPoliciesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.SslPolicies.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListSslPoliciesRequest" + 
}, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.ssl_policies.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_ssl_policies_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslPolicies_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_policies_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SslPoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.SslPolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSslPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + 
"shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_ssl_policies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslPolicies_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SslPoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.SslPolicies.Get", + "service": { + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetSslPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SslPolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_ssl_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslPolicies_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SslPoliciesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.SslPolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertSslPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_policy_resource", + "type": "google.cloud.compute_v1.types.SslPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_ssl_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslPolicies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_policies_insert_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SslPoliciesClient.list_available_features", + "method": { + "fullName": "google.cloud.compute.v1.SslPolicies.ListAvailableFeatures", + "service": { + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" + }, + "shortName": "ListAvailableFeatures" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListAvailableFeaturesSslPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SslPoliciesListAvailableFeaturesResponse", + "shortName": "list_available_features" + }, + "description": "Sample for ListAvailableFeatures", + "file": "compute_v1_generated_ssl_policies_list_available_features_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslPolicies_ListAvailableFeatures_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_policies_list_available_features_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SslPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.SslPolicies.List", + 
"service": { + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListSslPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.ssl_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_ssl_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslPolicies_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_policies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.SslPoliciesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.SslPolicies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchSslPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_policy", + "type": "str" + }, + { + "name": "ssl_policy_resource", + "type": "google.cloud.compute_v1.types.SslPolicy" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_ssl_policies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslPolicies_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListSubnetworksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.subnetworks.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": 
"compute_v1_generated_subnetworks_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "subnetwork", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_subnetworks_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.expand_ip_cidr_range", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.ExpandIpCidrRange", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "ExpandIpCidrRange" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ExpandIpCidrRangeSubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "subnetwork", + "type": "str" + }, + { + "name": "subnetworks_expand_ip_cidr_range_request_resource", + "type": "google.cloud.compute_v1.types.SubnetworksExpandIpCidrRangeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "expand_ip_cidr_range" + }, + "description": "Sample for ExpandIpCidrRange", + "file": "compute_v1_generated_subnetworks_expand_ip_cidr_range_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_ExpandIpCidrRange_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_expand_ip_cidr_range_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicySubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_subnetworks_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_get_iam_policy_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.Get", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetSubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "subnetwork", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Subnetwork", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_subnetworks_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + 
"shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertSubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "subnetwork_resource", + "type": "google.cloud.compute_v1.types.Subnetwork" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_subnetworks_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.list_usable", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.ListUsable", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "ListUsable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListUsableSubnetworksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.subnetworks.pagers.ListUsablePager", + "shortName": "list_usable" + }, + "description": "Sample for ListUsable", + "file": "compute_v1_generated_subnetworks_list_usable_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_ListUsable_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_list_usable_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.List", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListSubnetworksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.subnetworks.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_subnetworks_list_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.Patch", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchSubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "subnetwork", + "type": "str" + }, + { + "name": "subnetwork_resource", + "type": "google.cloud.compute_v1.types.Subnetwork" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_subnetworks_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicySubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_subnetworks_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.set_private_ip_google_access", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.SetPrivateIpGoogleAccess", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "SetPrivateIpGoogleAccess" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetPrivateIpGoogleAccessSubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "subnetwork", + "type": "str" + }, + { + "name": "subnetworks_set_private_ip_google_access_request_resource", + "type": "google.cloud.compute_v1.types.SubnetworksSetPrivateIpGoogleAccessRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_private_ip_google_access" + }, + "description": "Sample for SetPrivateIpGoogleAccess", + "file": "compute_v1_generated_subnetworks_set_private_ip_google_access_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_SetPrivateIpGoogleAccess_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + 
"start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_subnetworks_set_private_ip_google_access_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SubnetworksClient", + "shortName": "SubnetworksClient" + }, + "fullName": "google.cloud.compute_v1.SubnetworksClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Subnetworks.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Subnetworks", + "shortName": "Subnetworks" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsSubnetworkRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_subnetworks_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Subnetworks_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_subnetworks_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient", + "shortName": "TargetGrpcProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies", + "shortName": "TargetGrpcProxies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteTargetGrpcProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_grpc_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_target_grpc_proxies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetGrpcProxies_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_grpc_proxies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient", + "shortName": "TargetGrpcProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient.get", + 
"method": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies.Get", + "service": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies", + "shortName": "TargetGrpcProxies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetTargetGrpcProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_grpc_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetGrpcProxy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_target_grpc_proxies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetGrpcProxies_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_grpc_proxies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient", + "shortName": "TargetGrpcProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies", + "shortName": "TargetGrpcProxies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertTargetGrpcProxyRequest" + }, + { + 
"name": "project", + "type": "str" + }, + { + "name": "target_grpc_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetGrpcProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_target_grpc_proxies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetGrpcProxies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_grpc_proxies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient", + "shortName": "TargetGrpcProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies.List", + "service": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies", + "shortName": "TargetGrpcProxies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListTargetGrpcProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.target_grpc_proxies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_target_grpc_proxies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetGrpcProxies_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_grpc_proxies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient", + "shortName": "TargetGrpcProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetGrpcProxiesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.TargetGrpcProxies", + "shortName": "TargetGrpcProxies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchTargetGrpcProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_grpc_proxy", + "type": "str" + }, + { + "name": "target_grpc_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetGrpcProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_target_grpc_proxies_patch_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetGrpcProxies_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_grpc_proxies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient", + "shortName": "TargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies", + "shortName": "TargetHttpProxies" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListTargetHttpProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_http_proxies.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_target_http_proxies_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpProxies_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_http_proxies_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient", + "shortName": "TargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies", + "shortName": "TargetHttpProxies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_http_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_target_http_proxies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpProxies_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_target_http_proxies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient", + "shortName": "TargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies.Get", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies", + "shortName": "TargetHttpProxies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_http_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetHttpProxy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_target_http_proxies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpProxies_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_http_proxies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient", + "shortName": "TargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient.insert", + "method": { + "fullName": 
"google.cloud.compute.v1.TargetHttpProxies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies", + "shortName": "TargetHttpProxies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_http_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetHttpProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_target_http_proxies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpProxies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_http_proxies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient", + "shortName": "TargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies.List", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies", + "shortName": "TargetHttpProxies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.ListTargetHttpProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_http_proxies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_target_http_proxies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpProxies_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_http_proxies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient", + "shortName": "TargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies", + "shortName": "TargetHttpProxies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_http_proxy", + "type": "str" + }, + { + "name": "target_http_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetHttpProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", 
+ "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_target_http_proxies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpProxies_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_http_proxies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient", + "shortName": "TargetHttpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpProxiesClient.set_url_map", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies.SetUrlMap", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpProxies", + "shortName": "TargetHttpProxies" + }, + "shortName": "SetUrlMap" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetUrlMapTargetHttpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_http_proxy", + "type": "str" + }, + { + "name": "url_map_reference_resource", + "type": "google.cloud.compute_v1.types.UrlMapReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_url_map" + 
}, + "description": "Sample for SetUrlMap", + "file": "compute_v1_generated_target_http_proxies_set_url_map_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpProxies_SetUrlMap_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_http_proxies_set_url_map_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListTargetHttpsProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_https_proxies.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_target_https_proxies_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_AggregatedList_sync", + "segments": [ 
+ { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_target_https_proxies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.Get", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetHttpsProxy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_target_https_proxies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": 
"TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetHttpsProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_target_https_proxies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.List", + "service": { + "fullName": 
"google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListTargetHttpsProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_https_proxies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_target_https_proxies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.Patch", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": 
"target_https_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetHttpsProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_target_https_proxies_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.set_certificate_map", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.SetCertificateMap", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "SetCertificateMap" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetCertificateMapTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "target_https_proxies_set_certificate_map_request_resource", + "type": "google.cloud.compute_v1.types.TargetHttpsProxiesSetCertificateMapRequest" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_certificate_map" + }, + "description": "Sample for SetCertificateMap", + "file": "compute_v1_generated_target_https_proxies_set_certificate_map_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_SetCertificateMap_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_set_certificate_map_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.set_quic_override", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.SetQuicOverride", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "SetQuicOverride" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetQuicOverrideTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "target_https_proxies_set_quic_override_request_resource", + "type": "google.cloud.compute_v1.types.TargetHttpsProxiesSetQuicOverrideRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_quic_override" + }, + "description": "Sample for SetQuicOverride", + "file": "compute_v1_generated_target_https_proxies_set_quic_override_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_SetQuicOverride_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_set_quic_override_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.set_ssl_certificates", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.SetSslCertificates", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "SetSslCertificates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSslCertificatesTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "target_https_proxies_set_ssl_certificates_request_resource", + "type": "google.cloud.compute_v1.types.TargetHttpsProxiesSetSslCertificatesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, 
+ { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_ssl_certificates" + }, + "description": "Sample for SetSslCertificates", + "file": "compute_v1_generated_target_https_proxies_set_ssl_certificates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_SetSslCertificates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_set_ssl_certificates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.set_ssl_policy", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.SetSslPolicy", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "SetSslPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSslPolicyTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "ssl_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SslPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_ssl_policy" + }, + "description": "Sample for SetSslPolicy", + "file": "compute_v1_generated_target_https_proxies_set_ssl_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_SetSslPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_set_ssl_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient", + "shortName": "TargetHttpsProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetHttpsProxiesClient.set_url_map", + "method": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies.SetUrlMap", + "service": { + "fullName": "google.cloud.compute.v1.TargetHttpsProxies", + "shortName": "TargetHttpsProxies" + }, + "shortName": "SetUrlMap" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetUrlMapTargetHttpsProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_https_proxy", + "type": "str" + }, + { + "name": "url_map_reference_resource", + "type": "google.cloud.compute_v1.types.UrlMapReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_url_map" + }, + 
"description": "Sample for SetUrlMap", + "file": "compute_v1_generated_target_https_proxies_set_url_map_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetHttpsProxies_SetUrlMap_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_https_proxies_set_url_map_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetInstancesClient", + "shortName": "TargetInstancesClient" + }, + "fullName": "google.cloud.compute_v1.TargetInstancesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.TargetInstances.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.TargetInstances", + "shortName": "TargetInstances" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListTargetInstancesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_instances.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_target_instances_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetInstances_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + 
"type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_instances_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetInstancesClient", + "shortName": "TargetInstancesClient" + }, + "fullName": "google.cloud.compute_v1.TargetInstancesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.TargetInstances.Delete", + "service": { + "fullName": "google.cloud.compute.v1.TargetInstances", + "shortName": "TargetInstances" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteTargetInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "target_instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_target_instances_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetInstances_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, 
+ { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_instances_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetInstancesClient", + "shortName": "TargetInstancesClient" + }, + "fullName": "google.cloud.compute_v1.TargetInstancesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.TargetInstances.Get", + "service": { + "fullName": "google.cloud.compute.v1.TargetInstances", + "shortName": "TargetInstances" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetTargetInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "target_instance", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetInstance", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_target_instances_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetInstances_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_instances_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetInstancesClient", + "shortName": "TargetInstancesClient" + }, + "fullName": 
"google.cloud.compute_v1.TargetInstancesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.TargetInstances.Insert", + "service": { + "fullName": "google.cloud.compute.v1.TargetInstances", + "shortName": "TargetInstances" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertTargetInstanceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "target_instance_resource", + "type": "google.cloud.compute_v1.types.TargetInstance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_target_instances_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetInstances_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_instances_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetInstancesClient", + "shortName": "TargetInstancesClient" + }, + "fullName": "google.cloud.compute_v1.TargetInstancesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.TargetInstances.List", + "service": { + "fullName": "google.cloud.compute.v1.TargetInstances", + "shortName": "TargetInstances" + }, + 
"shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListTargetInstancesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_instances.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_target_instances_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetInstances_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_instances_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.add_health_check", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.AddHealthCheck", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "AddHealthCheck" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddHealthCheckTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": 
"target_pools_add_health_check_request_resource", + "type": "google.cloud.compute_v1.types.TargetPoolsAddHealthCheckRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_health_check" + }, + "description": "Sample for AddHealthCheck", + "file": "compute_v1_generated_target_pools_add_health_check_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_AddHealthCheck_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_add_health_check_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.add_instance", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.AddInstance", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "AddInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AddInstanceTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": "target_pools_add_instance_request_resource", + "type": "google.cloud.compute_v1.types.TargetPoolsAddInstanceRequest" + }, + 
{ + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_instance" + }, + "description": "Sample for AddInstance", + "file": "compute_v1_generated_target_pools_add_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_AddInstance_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_add_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListTargetPoolsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_pools.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + 
"file": "compute_v1_generated_target_pools_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.Delete", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_target_pools_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.get_health", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.GetHealth", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "GetHealth" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetHealthTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": "instance_reference_resource", + "type": "google.cloud.compute_v1.types.InstanceReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetPoolInstanceHealth", + "shortName": "get_health" + }, + "description": "Sample for GetHealth", + "file": "compute_v1_generated_target_pools_get_health_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_GetHealth_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_get_health_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.Get", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetPool", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_target_pools_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": 
"google.cloud.compute_v1.TargetPoolsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.Insert", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool_resource", + "type": "google.cloud.compute_v1.types.TargetPool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_target_pools_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.List", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.compute_v1.types.ListTargetPoolsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_pools.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_target_pools_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.remove_health_check", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.RemoveHealthCheck", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "RemoveHealthCheck" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveHealthCheckTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": "target_pools_remove_health_check_request_resource", + "type": 
"google.cloud.compute_v1.types.TargetPoolsRemoveHealthCheckRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_health_check" + }, + "description": "Sample for RemoveHealthCheck", + "file": "compute_v1_generated_target_pools_remove_health_check_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_RemoveHealthCheck_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_remove_health_check_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.remove_instance", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.RemoveInstance", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "RemoveInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.RemoveInstanceTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": "target_pools_remove_instance_request_resource", + "type": "google.cloud.compute_v1.types.TargetPoolsRemoveInstanceRequest" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_instance" + }, + "description": "Sample for RemoveInstance", + "file": "compute_v1_generated_target_pools_remove_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_RemoveInstance_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_remove_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetPoolsClient", + "shortName": "TargetPoolsClient" + }, + "fullName": "google.cloud.compute_v1.TargetPoolsClient.set_backup", + "method": { + "fullName": "google.cloud.compute.v1.TargetPools.SetBackup", + "service": { + "fullName": "google.cloud.compute.v1.TargetPools", + "shortName": "TargetPools" + }, + "shortName": "SetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetBackupTargetPoolRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_pool", + "type": "str" + }, + { + "name": "target_reference_resource", + "type": "google.cloud.compute_v1.types.TargetReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_backup" + }, + "description": "Sample for SetBackup", + "file": "compute_v1_generated_target_pools_set_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetPools_SetBackup_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_pools_set_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteTargetSslProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_ssl_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_target_ssl_proxies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_TargetSslProxies_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.Get", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetTargetSslProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_ssl_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetSslProxy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_target_ssl_proxies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetSslProxies_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertTargetSslProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_ssl_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetSslProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_target_ssl_proxies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetSslProxies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.List", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListTargetSslProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_ssl_proxies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_target_ssl_proxies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetSslProxies_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.set_backend_service", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.SetBackendService", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, 
+ "shortName": "SetBackendService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetBackendServiceTargetSslProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_ssl_proxy", + "type": "str" + }, + { + "name": "target_ssl_proxies_set_backend_service_request_resource", + "type": "google.cloud.compute_v1.types.TargetSslProxiesSetBackendServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_backend_service" + }, + "description": "Sample for SetBackendService", + "file": "compute_v1_generated_target_ssl_proxies_set_backend_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetSslProxies_SetBackendService_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_set_backend_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.set_certificate_map", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.SetCertificateMap", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, + "shortName": "SetCertificateMap" + }, + "parameters": [ 
+ { + "name": "request", + "type": "google.cloud.compute_v1.types.SetCertificateMapTargetSslProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_ssl_proxy", + "type": "str" + }, + { + "name": "target_ssl_proxies_set_certificate_map_request_resource", + "type": "google.cloud.compute_v1.types.TargetSslProxiesSetCertificateMapRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_certificate_map" + }, + "description": "Sample for SetCertificateMap", + "file": "compute_v1_generated_target_ssl_proxies_set_certificate_map_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetSslProxies_SetCertificateMap_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_set_certificate_map_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.set_proxy_header", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.SetProxyHeader", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, + "shortName": "SetProxyHeader" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.SetProxyHeaderTargetSslProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_ssl_proxy", + "type": "str" + }, + { + "name": "target_ssl_proxies_set_proxy_header_request_resource", + "type": "google.cloud.compute_v1.types.TargetSslProxiesSetProxyHeaderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_proxy_header" + }, + "description": "Sample for SetProxyHeader", + "file": "compute_v1_generated_target_ssl_proxies_set_proxy_header_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetSslProxies_SetProxyHeader_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_set_proxy_header_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.set_ssl_certificates", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.SetSslCertificates", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, + "shortName": "SetSslCertificates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSslCertificatesTargetSslProxyRequest" + }, + 
{ + "name": "project", + "type": "str" + }, + { + "name": "target_ssl_proxy", + "type": "str" + }, + { + "name": "target_ssl_proxies_set_ssl_certificates_request_resource", + "type": "google.cloud.compute_v1.types.TargetSslProxiesSetSslCertificatesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_ssl_certificates" + }, + "description": "Sample for SetSslCertificates", + "file": "compute_v1_generated_target_ssl_proxies_set_ssl_certificates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetSslProxies_SetSslCertificates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_set_ssl_certificates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient", + "shortName": "TargetSslProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetSslProxiesClient.set_ssl_policy", + "method": { + "fullName": "google.cloud.compute.v1.TargetSslProxies.SetSslPolicy", + "service": { + "fullName": "google.cloud.compute.v1.TargetSslProxies", + "shortName": "TargetSslProxies" + }, + "shortName": "SetSslPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSslPolicyTargetSslProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": 
"target_ssl_proxy", + "type": "str" + }, + { + "name": "ssl_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SslPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_ssl_policy" + }, + "description": "Sample for SetSslPolicy", + "file": "compute_v1_generated_target_ssl_proxies_set_ssl_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetSslProxies_SetSslPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_ssl_proxies_set_ssl_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient", + "shortName": "TargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies", + "shortName": "TargetTcpProxies" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListTargetTcpProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_tcp_proxies.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_target_tcp_proxies_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetTcpProxies_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_tcp_proxies_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient", + "shortName": "TargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies", + "shortName": "TargetTcpProxies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteTargetTcpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_tcp_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_target_tcp_proxies_delete_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetTcpProxies_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_tcp_proxies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient", + "shortName": "TargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies.Get", + "service": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies", + "shortName": "TargetTcpProxies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetTargetTcpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_tcp_proxy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetTcpProxy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_target_tcp_proxies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetTcpProxies_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_tcp_proxies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient", + "shortName": "TargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies", + "shortName": "TargetTcpProxies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertTargetTcpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_tcp_proxy_resource", + "type": "google.cloud.compute_v1.types.TargetTcpProxy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_target_tcp_proxies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetTcpProxies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_tcp_proxies_insert_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient", + "shortName": "TargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies.List", + "service": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies", + "shortName": "TargetTcpProxies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListTargetTcpProxiesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_tcp_proxies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_target_tcp_proxies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetTcpProxies_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_tcp_proxies_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient", + "shortName": "TargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient.set_backend_service", + "method": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies.SetBackendService", + "service": { + "fullName": 
"google.cloud.compute.v1.TargetTcpProxies", + "shortName": "TargetTcpProxies" + }, + "shortName": "SetBackendService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetBackendServiceTargetTcpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_tcp_proxy", + "type": "str" + }, + { + "name": "target_tcp_proxies_set_backend_service_request_resource", + "type": "google.cloud.compute_v1.types.TargetTcpProxiesSetBackendServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_backend_service" + }, + "description": "Sample for SetBackendService", + "file": "compute_v1_generated_target_tcp_proxies_set_backend_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetTcpProxies_SetBackendService_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_tcp_proxies_set_backend_service_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient", + "shortName": "TargetTcpProxiesClient" + }, + "fullName": "google.cloud.compute_v1.TargetTcpProxiesClient.set_proxy_header", + "method": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies.SetProxyHeader", + "service": { + "fullName": "google.cloud.compute.v1.TargetTcpProxies", + "shortName": 
"TargetTcpProxies" + }, + "shortName": "SetProxyHeader" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetProxyHeaderTargetTcpProxyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "target_tcp_proxy", + "type": "str" + }, + { + "name": "target_tcp_proxies_set_proxy_header_request_resource", + "type": "google.cloud.compute_v1.types.TargetTcpProxiesSetProxyHeaderRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_proxy_header" + }, + "description": "Sample for SetProxyHeader", + "file": "compute_v1_generated_target_tcp_proxies_set_proxy_header_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetTcpProxies_SetProxyHeader_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_tcp_proxies_set_proxy_header_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient", + "shortName": "TargetVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways", + "shortName": "TargetVpnGateways" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListTargetVpnGatewaysRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_vpn_gateways.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_target_vpn_gateways_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetVpnGateways_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_vpn_gateways_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient", + "shortName": "TargetVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways.Delete", + "service": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways", + "shortName": "TargetVpnGateways" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteTargetVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_vpn_gateway", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_target_vpn_gateways_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetVpnGateways_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_vpn_gateways_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient", + "shortName": "TargetVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient.get", + "method": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways.Get", + "service": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways", + "shortName": "TargetVpnGateways" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetTargetVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_vpn_gateway", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TargetVpnGateway", + "shortName": "get" + }, + "description": "Sample for 
Get", + "file": "compute_v1_generated_target_vpn_gateways_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetVpnGateways_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_vpn_gateways_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient", + "shortName": "TargetVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways.Insert", + "service": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways", + "shortName": "TargetVpnGateways" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertTargetVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "target_vpn_gateway_resource", + "type": "google.cloud.compute_v1.types.TargetVpnGateway" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_target_vpn_gateways_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetVpnGateways_Insert_sync", + "segments": [ + { + 
"end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_vpn_gateways_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient", + "shortName": "TargetVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient.list", + "method": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways.List", + "service": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways", + "shortName": "TargetVpnGateways" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListTargetVpnGatewaysRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.target_vpn_gateways.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_target_vpn_gateways_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetVpnGateways_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_target_vpn_gateways_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient", + "shortName": "TargetVpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.TargetVpnGatewaysClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.TargetVpnGateways", + "shortName": "TargetVpnGateways" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsTargetVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_target_vpn_gateways_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_TargetVpnGateways_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_target_vpn_gateways_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListUrlMapsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.url_maps.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_url_maps_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.Delete", + "service": { + "fullName": 
"google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_url_maps_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.Get", + "service": { + "fullName": "google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", 
+ "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.UrlMap", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_url_maps_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.Insert", + "service": { + "fullName": "google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "url_map_resource", + "type": "google.cloud.compute_v1.types.UrlMap" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_url_maps_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.invalidate_cache", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.InvalidateCache", + "service": { + "fullName": "google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "InvalidateCache" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InvalidateCacheUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "cache_invalidation_rule_resource", + "type": "google.cloud.compute_v1.types.CacheInvalidationRule" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "invalidate_cache" + }, + "description": "Sample for InvalidateCache", + "file": "compute_v1_generated_url_maps_invalidate_cache_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_InvalidateCache_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + 
"start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_invalidate_cache_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.List", + "service": { + "fullName": "google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListUrlMapsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.url_maps.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_url_maps_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.Patch", + 
"service": { + "fullName": "google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "url_map_resource", + "type": "google.cloud.compute_v1.types.UrlMap" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_url_maps_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.update", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.Update", + "service": { + "fullName": "google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": 
"url_map_resource", + "type": "google.cloud.compute_v1.types.UrlMap" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_url_maps_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_Update_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.UrlMapsClient", + "shortName": "UrlMapsClient" + }, + "fullName": "google.cloud.compute_v1.UrlMapsClient.validate", + "method": { + "fullName": "google.cloud.compute.v1.UrlMaps.Validate", + "service": { + "fullName": "google.cloud.compute.v1.UrlMaps", + "shortName": "UrlMaps" + }, + "shortName": "Validate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ValidateUrlMapRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "url_map", + "type": "str" + }, + { + "name": "url_maps_validate_request_resource", + "type": "google.cloud.compute_v1.types.UrlMapsValidateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.types.UrlMapsValidateResponse", + "shortName": "validate" + }, + "description": "Sample for Validate", + "file": "compute_v1_generated_url_maps_validate_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_UrlMaps_Validate_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_url_maps_validate_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.VpnGatewaysClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListVpnGatewaysRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.vpn_gateways.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_vpn_gateways_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnGateways_AggregatedList_sync", + "segments": [ + { + "end": 52, + 
"start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.VpnGatewaysClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.Delete", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_gateway", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_vpn_gateways_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnGateways_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 
51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.VpnGatewaysClient.get_status", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.GetStatus", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "GetStatus" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetStatusVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_gateway", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.VpnGatewaysGetStatusResponse", + "shortName": "get_status" + }, + "description": "Sample for GetStatus", + "file": "compute_v1_generated_vpn_gateways_get_status_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnGateways_GetStatus_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_get_status_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": 
"google.cloud.compute_v1.VpnGatewaysClient.get", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.Get", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_gateway", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.VpnGateway", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_vpn_gateways_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnGateways_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.VpnGatewaysClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.Insert", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertVpnGatewayRequest" + }, + { 
+ "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_gateway_resource", + "type": "google.cloud.compute_v1.types.VpnGateway" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_vpn_gateways_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnGateways_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.VpnGatewaysClient.list", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.List", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListVpnGatewaysRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.vpn_gateways.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_vpn_gateways_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnGateways_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.VpnGatewaysClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_vpn_gateways_set_labels_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnGateways_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.VpnGatewaysClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_VpnGateways_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListVpnTunnelsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.vpn_tunnels.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_vpn_tunnels_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnTunnels_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.Delete", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteVpnTunnelRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_tunnel", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_vpn_tunnels_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnTunnels_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": 
"VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.Get", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetVpnTunnelRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_tunnel", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.VpnTunnel", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_vpn_tunnels_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnTunnels_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.Insert", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.InsertVpnTunnelRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_tunnel_resource", + "type": "google.cloud.compute_v1.types.VpnTunnel" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_vpn_tunnels_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnTunnels_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.List", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListVpnTunnelsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.vpn_tunnels.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_vpn_tunnels_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnTunnels_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsVpnTunnelRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": 
"compute_v1_generated_vpn_tunnels_set_labels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnTunnels_SetLabels_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ZoneOperationsClient", + "shortName": "ZoneOperationsClient" + }, + "fullName": "google.cloud.compute_v1.ZoneOperationsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.ZoneOperations.Delete", + "service": { + "fullName": "google.cloud.compute.v1.ZoneOperations", + "shortName": "ZoneOperations" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteZoneOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.DeleteZoneOperationResponse", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_zone_operations_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ZoneOperations_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_zone_operations_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ZoneOperationsClient", + "shortName": "ZoneOperationsClient" + }, + "fullName": "google.cloud.compute_v1.ZoneOperationsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.ZoneOperations.Get", + "service": { + "fullName": "google.cloud.compute.v1.ZoneOperations", + "shortName": "ZoneOperations" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetZoneOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_zone_operations_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ZoneOperations_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_zone_operations_get_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ZoneOperationsClient", + "shortName": "ZoneOperationsClient" + }, + "fullName": "google.cloud.compute_v1.ZoneOperationsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.ZoneOperations.List", + "service": { + "fullName": "google.cloud.compute.v1.ZoneOperations", + "shortName": "ZoneOperations" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListZoneOperationsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.zone_operations.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_zone_operations_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ZoneOperations_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_zone_operations_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ZoneOperationsClient", + "shortName": "ZoneOperationsClient" + }, + "fullName": "google.cloud.compute_v1.ZoneOperationsClient.wait", + "method": { + "fullName": "google.cloud.compute.v1.ZoneOperations.Wait", + "service": { + "fullName": 
"google.cloud.compute.v1.ZoneOperations", + "shortName": "ZoneOperations" + }, + "shortName": "Wait" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.WaitZoneOperationRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "operation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "wait" + }, + "description": "Sample for Wait", + "file": "compute_v1_generated_zone_operations_wait_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ZoneOperations_Wait_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_zone_operations_wait_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ZonesClient", + "shortName": "ZonesClient" + }, + "fullName": "google.cloud.compute_v1.ZonesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Zones.Get", + "service": { + "fullName": "google.cloud.compute.v1.Zones", + "shortName": "Zones" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetZoneRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Zone", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_zones_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Zones_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_zones_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ZonesClient", + "shortName": "ZonesClient" + }, + "fullName": "google.cloud.compute_v1.ZonesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Zones.List", + "service": { + "fullName": "google.cloud.compute.v1.Zones", + "shortName": "Zones" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListZonesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.zones.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_zones_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Zones_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 
40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_zones_list_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1/scripts/fixup_compute_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_compute_v1_keywords.py new file mode 100644 index 000000000..318c54727 --- /dev/null +++ b/owl-bot-staging/v1/scripts/fixup_compute_v1_keywords.py @@ -0,0 +1,311 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class computeCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'abandon_instances': ('instance_group_manager', 'instance_group_managers_abandon_instances_request_resource', 'project', 'zone', 'request_id', ), + 'add_access_config': ('access_config_resource', 'instance', 'network_interface', 'project', 'zone', 'request_id', ), + 'add_association': ('firewall_policy', 'firewall_policy_association_resource', 'replace_existing_association', 'request_id', ), + 'add_health_check': ('project', 'region', 'target_pool', 'target_pools_add_health_check_request_resource', 'request_id', ), + 'add_instance': ('project', 'region', 'target_pool', 'target_pools_add_instance_request_resource', 'request_id', ), + 'add_instances': ('instance_group', 'instance_groups_add_instances_request_resource', 'project', 'zone', 'request_id', ), + 'add_nodes': ('node_group', 'node_groups_add_nodes_request_resource', 'project', 'zone', 'request_id', ), + 'add_peering': ('network', 'networks_add_peering_request_resource', 'project', 'request_id', ), + 'add_resource_policies': ('disk', 'disks_add_resource_policies_request_resource', 'project', 'zone', 'request_id', ), + 'add_rule': ('firewall_policy', 'firewall_policy_rule_resource', 'request_id', ), + 'add_signed_url_key': ('backend_bucket', 'project', 'signed_url_key_resource', 'request_id', ), + 'aggregated_list': ('project', 'filter', 'include_all_scopes', 'max_results', 'order_by', 'page_token', 
'return_partial_success', ), + 'apply_updates_to_instances': ('instance_group_manager', 'instance_group_managers_apply_updates_request_resource', 'project', 'zone', ), + 'attach_disk': ('attached_disk_resource', 'instance', 'project', 'zone', 'force_attach', 'request_id', ), + 'attach_network_endpoints': ('global_network_endpoint_groups_attach_endpoints_request_resource', 'network_endpoint_group', 'project', 'request_id', ), + 'bulk_insert': ('bulk_insert_disk_resource_resource', 'project', 'zone', 'request_id', ), + 'clone_rules': ('firewall_policy', 'request_id', 'source_firewall_policy', ), + 'create_instances': ('instance_group_manager', 'instance_group_managers_create_instances_request_resource', 'project', 'zone', 'request_id', ), + 'create_snapshot': ('disk', 'project', 'snapshot_resource', 'zone', 'guest_flush', 'request_id', ), + 'delete': ('address', 'project', 'region', 'request_id', ), + 'delete_access_config': ('access_config', 'instance', 'network_interface', 'project', 'zone', 'request_id', ), + 'delete_instances': ('instance_group_manager', 'instance_group_managers_delete_instances_request_resource', 'project', 'zone', 'request_id', ), + 'delete_nodes': ('node_group', 'node_groups_delete_nodes_request_resource', 'project', 'zone', 'request_id', ), + 'delete_per_instance_configs': ('instance_group_manager', 'instance_group_managers_delete_per_instance_configs_req_resource', 'project', 'zone', ), + 'delete_signed_url_key': ('backend_bucket', 'key_name', 'project', 'request_id', ), + 'deprecate': ('deprecation_status_resource', 'image', 'project', 'request_id', ), + 'detach_disk': ('device_name', 'instance', 'project', 'zone', 'request_id', ), + 'detach_network_endpoints': ('global_network_endpoint_groups_detach_endpoints_request_resource', 'network_endpoint_group', 'project', 'request_id', ), + 'disable_xpn_host': ('project', 'request_id', ), + 'disable_xpn_resource': ('project', 'projects_disable_xpn_resource_request_resource', 'request_id', ), + 
'enable_xpn_host': ('project', 'request_id', ), + 'enable_xpn_resource': ('project', 'projects_enable_xpn_resource_request_resource', 'request_id', ), + 'expand_ip_cidr_range': ('project', 'region', 'subnetwork', 'subnetworks_expand_ip_cidr_range_request_resource', 'request_id', ), + 'get': ('accelerator_type', 'project', 'zone', ), + 'get_association': ('firewall_policy', 'name', ), + 'get_diagnostics': ('interconnect', 'project', ), + 'get_effective_firewalls': ('instance', 'network_interface', 'project', 'zone', ), + 'get_from_family': ('family', 'project', ), + 'get_guest_attributes': ('instance', 'project', 'zone', 'query_path', 'variable_key', ), + 'get_health': ('backend_service', 'project', 'resource_group_reference_resource', ), + 'get_iam_policy': ('project', 'resource', 'options_requested_policy_version', ), + 'get_nat_mapping_info': ('project', 'region', 'router', 'filter', 'max_results', 'nat_name', 'order_by', 'page_token', 'return_partial_success', ), + 'get_router_status': ('project', 'region', 'router', ), + 'get_rule': ('firewall_policy', 'priority', ), + 'get_screenshot': ('instance', 'project', 'zone', ), + 'get_serial_port_output': ('instance', 'project', 'zone', 'port', 'start', ), + 'get_shielded_instance_identity': ('instance', 'project', 'zone', ), + 'get_status': ('project', 'region', 'vpn_gateway', ), + 'get_xpn_host': ('project', ), + 'get_xpn_resources': ('project', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'insert': ('address_resource', 'project', 'region', 'request_id', ), + 'invalidate_cache': ('cache_invalidation_rule_resource', 'project', 'url_map', 'request_id', ), + 'list': ('project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_associations': ('target_resource', ), + 'list_available_features': ('project', 'region', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_errors': ('instance_group_manager', 
'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_instances': ('instance_group', 'instance_groups_list_instances_request_resource', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_managed_instances': ('instance_group_manager', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_network_endpoints': ('network_endpoint_group', 'project', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_nodes': ('node_group', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_peering_routes': ('network', 'project', 'direction', 'filter', 'max_results', 'order_by', 'page_token', 'peering_name', 'region', 'return_partial_success', ), + 'list_per_instance_configs': ('instance_group_manager', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_preconfigured_expression_sets': ('project', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_referrers': ('instance', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_usable': ('project', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_xpn_hosts': ('project', 'projects_list_xpn_hosts_request_resource', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'move': ('address', 'project', 'region', 'region_addresses_move_request_resource', 'request_id', ), + 'move_disk': ('disk_move_request_resource', 'project', 'request_id', ), + 'move_instance': ('instance_move_request_resource', 'project', 'request_id', ), + 'patch': ('autoscaler_resource', 'project', 'zone', 'autoscaler', 'request_id', ), + 'patch_per_instance_configs': ('instance_group_manager', 
'instance_group_managers_patch_per_instance_configs_req_resource', 'project', 'zone', 'request_id', ), + 'patch_rule': ('firewall_policy', 'firewall_policy_rule_resource', 'priority', 'request_id', ), + 'preview': ('project', 'region', 'router', 'router_resource', ), + 'recreate_instances': ('instance_group_manager', 'instance_group_managers_recreate_instances_request_resource', 'project', 'zone', 'request_id', ), + 'remove_association': ('firewall_policy', 'name', 'request_id', ), + 'remove_health_check': ('project', 'region', 'target_pool', 'target_pools_remove_health_check_request_resource', 'request_id', ), + 'remove_instance': ('project', 'region', 'target_pool', 'target_pools_remove_instance_request_resource', 'request_id', ), + 'remove_instances': ('instance_group', 'instance_groups_remove_instances_request_resource', 'project', 'zone', 'request_id', ), + 'remove_peering': ('network', 'networks_remove_peering_request_resource', 'project', 'request_id', ), + 'remove_resource_policies': ('disk', 'disks_remove_resource_policies_request_resource', 'project', 'zone', 'request_id', ), + 'remove_rule': ('firewall_policy', 'priority', 'request_id', ), + 'reset': ('instance', 'project', 'zone', 'request_id', ), + 'resize': ('disk', 'disks_resize_request_resource', 'project', 'zone', 'request_id', ), + 'resume': ('instance', 'project', 'zone', 'request_id', ), + 'send_diagnostic_interrupt': ('instance', 'project', 'zone', ), + 'set_backend_service': ('project', 'target_ssl_proxies_set_backend_service_request_resource', 'target_ssl_proxy', 'request_id', ), + 'set_backup': ('project', 'region', 'target_pool', 'target_reference_resource', 'failover_ratio', 'request_id', ), + 'set_certificate_map': ('project', 'target_https_proxies_set_certificate_map_request_resource', 'target_https_proxy', 'request_id', ), + 'set_common_instance_metadata': ('metadata_resource', 'project', 'request_id', ), + 'set_default_network_tier': ('project', 
'projects_set_default_network_tier_request_resource', 'request_id', ), + 'set_deletion_protection': ('project', 'resource', 'zone', 'deletion_protection', 'request_id', ), + 'set_disk_auto_delete': ('auto_delete', 'device_name', 'instance', 'project', 'zone', 'request_id', ), + 'set_edge_security_policy': ('backend_bucket', 'project', 'security_policy_reference_resource', 'request_id', ), + 'set_iam_policy': ('global_set_policy_request_resource', 'project', 'resource', ), + 'set_instance_template': ('instance_group_manager', 'instance_group_managers_set_instance_template_request_resource', 'project', 'zone', 'request_id', ), + 'set_labels': ('project', 'region', 'region_set_labels_request_resource', 'resource', 'request_id', ), + 'set_machine_resources': ('instance', 'instances_set_machine_resources_request_resource', 'project', 'zone', 'request_id', ), + 'set_machine_type': ('instance', 'instances_set_machine_type_request_resource', 'project', 'zone', 'request_id', ), + 'set_metadata': ('instance', 'metadata_resource', 'project', 'zone', 'request_id', ), + 'set_min_cpu_platform': ('instance', 'instances_set_min_cpu_platform_request_resource', 'project', 'zone', 'request_id', ), + 'set_name': ('instance', 'instances_set_name_request_resource', 'project', 'zone', 'request_id', ), + 'set_named_ports': ('instance_group', 'instance_groups_set_named_ports_request_resource', 'project', 'zone', 'request_id', ), + 'set_node_template': ('node_group', 'node_groups_set_node_template_request_resource', 'project', 'zone', 'request_id', ), + 'set_private_ip_google_access': ('project', 'region', 'subnetwork', 'subnetworks_set_private_ip_google_access_request_resource', 'request_id', ), + 'set_proxy_header': ('project', 'target_ssl_proxies_set_proxy_header_request_resource', 'target_ssl_proxy', 'request_id', ), + 'set_quic_override': ('project', 'target_https_proxies_set_quic_override_request_resource', 'target_https_proxy', 'request_id', ), + 'set_scheduling': ('instance', 
'project', 'scheduling_resource', 'zone', 'request_id', ), + 'set_security_policy': ('backend_service', 'project', 'security_policy_reference_resource', 'request_id', ), + 'set_service_account': ('instance', 'instances_set_service_account_request_resource', 'project', 'zone', 'request_id', ), + 'set_shielded_instance_integrity_policy': ('instance', 'project', 'shielded_instance_integrity_policy_resource', 'zone', 'request_id', ), + 'set_ssl_certificates': ('project', 'region', 'region_target_https_proxies_set_ssl_certificates_request_resource', 'target_https_proxy', 'request_id', ), + 'set_ssl_policy': ('project', 'ssl_policy_reference_resource', 'target_https_proxy', 'request_id', ), + 'set_tags': ('instance', 'project', 'tags_resource', 'zone', 'request_id', ), + 'set_target': ('forwarding_rule', 'project', 'region', 'target_reference_resource', 'request_id', ), + 'set_target_pools': ('instance_group_manager', 'instance_group_managers_set_target_pools_request_resource', 'project', 'zone', 'request_id', ), + 'set_url_map': ('project', 'region', 'target_http_proxy', 'url_map_reference_resource', 'request_id', ), + 'set_usage_export_bucket': ('project', 'usage_export_location_resource', 'request_id', ), + 'simulate_maintenance_event': ('instance', 'project', 'zone', 'request_id', ), + 'start': ('instance', 'project', 'zone', 'request_id', ), + 'start_async_replication': ('disk', 'disks_start_async_replication_request_resource', 'project', 'zone', 'request_id', ), + 'start_with_encryption_key': ('instance', 'instances_start_with_encryption_key_request_resource', 'project', 'zone', 'request_id', ), + 'stop': ('instance', 'project', 'zone', 'discard_local_ssd', 'request_id', ), + 'stop_async_replication': ('disk', 'project', 'zone', 'request_id', ), + 'stop_group_async_replication': ('disks_stop_group_async_replication_resource_resource', 'project', 'zone', 'request_id', ), + 'suspend': ('instance', 'project', 'zone', 'discard_local_ssd', 'request_id', ), + 
'switch_to_custom_mode': ('network', 'project', 'request_id', ), + 'test_iam_permissions': ('project', 'resource', 'test_permissions_request_resource', 'zone', ), + 'update': ('autoscaler_resource', 'project', 'zone', 'autoscaler', 'request_id', ), + 'update_access_config': ('access_config_resource', 'instance', 'network_interface', 'project', 'zone', 'request_id', ), + 'update_display_device': ('display_device_resource', 'instance', 'project', 'zone', 'request_id', ), + 'update_network_interface': ('instance', 'network_interface', 'network_interface_resource', 'project', 'zone', 'request_id', ), + 'update_peering': ('network', 'networks_update_peering_request_resource', 'project', 'request_id', ), + 'update_per_instance_configs': ('instance_group_manager', 'instance_group_managers_update_per_instance_configs_req_resource', 'project', 'zone', 'request_id', ), + 'update_shielded_instance_config': ('instance', 'project', 'shielded_instance_config_resource', 'zone', 'request_id', ), + 'validate': ('project', 'region', 'region_url_maps_validate_request_resource', 'url_map', ), + 'wait': ('operation', 'project', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=computeCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the compute client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py new file mode 100644 index 000000000..ce3f2936d --- /dev/null +++ b/owl-bot-staging/v1/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-compute' + + +description = "Google Cloud Compute API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/compute/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-compute" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended 
Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000..ed7f9aed2 --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000..ed7f9aed2 --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000..ed7f9aed2 --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000..6c44adfea --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000..ed7f9aed2 --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000..ed7f9aed2 --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 000000000..1b4db446e --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 000000000..1b4db446e --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000..1b4db446e --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/__init__.py new file mode 100644 index 000000000..1b4db446e --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_accelerator_types.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_accelerator_types.py new file mode 100644 index 000000000..5f472d819 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_accelerator_types.py @@ -0,0 +1,1706 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.accelerator_types import AcceleratorTypesClient +from google.cloud.compute_v1.services.accelerator_types import pagers +from google.cloud.compute_v1.services.accelerator_types import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AcceleratorTypesClient._get_default_mtls_endpoint(None) is None + assert AcceleratorTypesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert AcceleratorTypesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert AcceleratorTypesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert AcceleratorTypesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert AcceleratorTypesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (AcceleratorTypesClient, "rest"), +]) +def test_accelerator_types_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.AcceleratorTypesRestTransport, "rest"), +]) +def 
test_accelerator_types_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (AcceleratorTypesClient, "rest"), +]) +def test_accelerator_types_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_accelerator_types_client_get_transport_class(): + transport = AcceleratorTypesClient.get_transport_class() + available_transports = [ + transports.AcceleratorTypesRestTransport, + ] + assert transport in available_transports + + transport = AcceleratorTypesClient.get_transport_class("rest") + assert transport == transports.AcceleratorTypesRestTransport + + 
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AcceleratorTypesClient, transports.AcceleratorTypesRestTransport, "rest"), +]) +@mock.patch.object(AcceleratorTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AcceleratorTypesClient)) +def test_accelerator_types_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AcceleratorTypesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AcceleratorTypesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (AcceleratorTypesClient, transports.AcceleratorTypesRestTransport, "rest", "true"), + (AcceleratorTypesClient, transports.AcceleratorTypesRestTransport, "rest", "false"), +]) +@mock.patch.object(AcceleratorTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AcceleratorTypesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_accelerator_types_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + AcceleratorTypesClient +]) +@mock.patch.object(AcceleratorTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AcceleratorTypesClient)) +def test_accelerator_types_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AcceleratorTypesClient, transports.AcceleratorTypesRestTransport, "rest"), +]) +def test_accelerator_types_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (AcceleratorTypesClient, transports.AcceleratorTypesRestTransport, "rest", None), +]) +def test_accelerator_types_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListAcceleratorTypesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AcceleratorTypeAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListAcceleratorTypesRequest): + transport_class = transports.AcceleratorTypesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.AcceleratorTypeAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.AcceleratorTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.AcceleratorTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AcceleratorTypesRestInterceptor(), + ) + client = AcceleratorTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AcceleratorTypesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.AcceleratorTypesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListAcceleratorTypesRequest.pb(compute.AggregatedListAcceleratorTypesRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.AcceleratorTypeAggregatedList.to_json(compute.AcceleratorTypeAggregatedList()) + + request = compute.AggregatedListAcceleratorTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.AcceleratorTypeAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListAcceleratorTypesRequest): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.AcceleratorTypeAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AcceleratorTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/acceleratorTypes" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListAcceleratorTypesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.AcceleratorTypeAggregatedList( + items={ + 'a':compute.AcceleratorTypesScopedList(), + 'b':compute.AcceleratorTypesScopedList(), + 'c':compute.AcceleratorTypesScopedList(), + }, + next_page_token='abc', + ), + compute.AcceleratorTypeAggregatedList( + items={}, + next_page_token='def', + ), + compute.AcceleratorTypeAggregatedList( + items={ + 'g':compute.AcceleratorTypesScopedList(), + }, + next_page_token='ghi', + ), + compute.AcceleratorTypeAggregatedList( + items={ + 'h':compute.AcceleratorTypesScopedList(), + 'i':compute.AcceleratorTypesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.AcceleratorTypeAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.AcceleratorTypesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.AcceleratorTypesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.AcceleratorTypesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.GetAcceleratorTypeRequest, + dict, +]) +def 
test_get_rest(request_type): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'accelerator_type': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AcceleratorType( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + maximum_cards_per_instance=2756, + name='name_value', + self_link='self_link_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AcceleratorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.AcceleratorType) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.maximum_cards_per_instance == 2756 + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetAcceleratorTypeRequest): + transport_class = transports.AcceleratorTypesRestTransport + + request_init = {} + request_init["accelerator_type"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["acceleratorType"] = 'accelerator_type_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "acceleratorType" in jsonified_request + assert jsonified_request["acceleratorType"] == 'accelerator_type_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AcceleratorTypesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.AcceleratorType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.AcceleratorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.AcceleratorTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("acceleratorType", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.AcceleratorTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AcceleratorTypesRestInterceptor(), + ) + client = 
AcceleratorTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AcceleratorTypesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.AcceleratorTypesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetAcceleratorTypeRequest.pb(compute.GetAcceleratorTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.AcceleratorType.to_json(compute.AcceleratorType()) + + request = compute.GetAcceleratorTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.AcceleratorType() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetAcceleratorTypeRequest): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'accelerator_type': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AcceleratorType() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'accelerator_type': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + accelerator_type='accelerator_type_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AcceleratorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes/{accelerator_type}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetAcceleratorTypeRequest(), + project='project_value', + zone='zone_value', + accelerator_type='accelerator_type_value', + ) + + +def test_get_rest_error(): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListAcceleratorTypesRequest, + dict, +]) +def test_list_rest(request_type): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.AcceleratorTypeList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AcceleratorTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListAcceleratorTypesRequest): + transport_class = transports.AcceleratorTypesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.AcceleratorTypeList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.AcceleratorTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.AcceleratorTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.AcceleratorTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AcceleratorTypesRestInterceptor(), + ) + client = AcceleratorTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AcceleratorTypesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.AcceleratorTypesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListAcceleratorTypesRequest.pb(compute.ListAcceleratorTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.AcceleratorTypeList.to_json(compute.AcceleratorTypeList()) + + request = compute.ListAcceleratorTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.AcceleratorTypeList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListAcceleratorTypesRequest): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.AcceleratorTypeList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AcceleratorTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListAcceleratorTypesRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.AcceleratorTypeList( + items=[ + compute.AcceleratorType(), + compute.AcceleratorType(), + compute.AcceleratorType(), + ], + next_page_token='abc', + ), + compute.AcceleratorTypeList( + items=[], + next_page_token='def', + ), + compute.AcceleratorTypeList( + items=[ + compute.AcceleratorType(), + ], + next_page_token='ghi', + ), + compute.AcceleratorTypeList( + items=[ + compute.AcceleratorType(), + compute.AcceleratorType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.AcceleratorTypeList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.AcceleratorType) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AcceleratorTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.AcceleratorTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AcceleratorTypesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AcceleratorTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AcceleratorTypesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AcceleratorTypesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AcceleratorTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AcceleratorTypesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AcceleratorTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AcceleratorTypesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.AcceleratorTypesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = AcceleratorTypesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_accelerator_types_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AcceleratorTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_accelerator_types_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.accelerator_types.transports.AcceleratorTypesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.AcceleratorTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'get', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_accelerator_types_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.accelerator_types.transports.AcceleratorTypesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AcceleratorTypesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_accelerator_types_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.accelerator_types.transports.AcceleratorTypesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AcceleratorTypesTransport() + adc.assert_called_once() + + +def test_accelerator_types_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AcceleratorTypesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_accelerator_types_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.AcceleratorTypesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_accelerator_types_host_no_port(transport_name): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_accelerator_types_host_with_port(transport_name): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_accelerator_types_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AcceleratorTypesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AcceleratorTypesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = AcceleratorTypesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = AcceleratorTypesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AcceleratorTypesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = AcceleratorTypesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = AcceleratorTypesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AcceleratorTypesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = AcceleratorTypesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = AcceleratorTypesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AcceleratorTypesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = AcceleratorTypesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = AcceleratorTypesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AcceleratorTypesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = AcceleratorTypesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = AcceleratorTypesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AcceleratorTypesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.AcceleratorTypesTransport, '_prep_wrapped_messages') as prep: + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.AcceleratorTypesTransport, '_prep_wrapped_messages') as prep: + transport_class = AcceleratorTypesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (AcceleratorTypesClient, transports.AcceleratorTypesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_addresses.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_addresses.py new file mode 100644 index 000000000..dba36fc95 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_addresses.py @@ -0,0 +1,3978 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.addresses import AddressesClient +from google.cloud.compute_v1.services.addresses import pagers +from google.cloud.compute_v1.services.addresses import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AddressesClient._get_default_mtls_endpoint(None) is None + assert AddressesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert AddressesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert AddressesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert AddressesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert AddressesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (AddressesClient, "rest"), +]) +def test_addresses_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.AddressesRestTransport, "rest"), +]) +def test_addresses_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds 
= service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (AddressesClient, "rest"), +]) +def test_addresses_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_addresses_client_get_transport_class(): + transport = AddressesClient.get_transport_class() + available_transports = [ + transports.AddressesRestTransport, + ] + assert transport in available_transports + + transport = AddressesClient.get_transport_class("rest") + assert transport == transports.AddressesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AddressesClient, transports.AddressesRestTransport, "rest"), +]) +@mock.patch.object(AddressesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AddressesClient)) +def test_addresses_client_client_options(client_class, transport_class, 
transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AddressesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AddressesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (AddressesClient, transports.AddressesRestTransport, "rest", "true"), + (AddressesClient, transports.AddressesRestTransport, "rest", "false"), +]) +@mock.patch.object(AddressesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AddressesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_addresses_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + AddressesClient +]) +@mock.patch.object(AddressesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AddressesClient)) +def test_addresses_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AddressesClient, transports.AddressesRestTransport, "rest"), +]) +def test_addresses_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (AddressesClient, transports.AddressesRestTransport, "rest", None), +]) +def test_addresses_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListAddressesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AddressAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AddressAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListAddressesRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.AddressAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.AddressAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AddressesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListAddressesRequest.pb(compute.AggregatedListAddressesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.AddressAggregatedList.to_json(compute.AddressAggregatedList()) + + request = compute.AggregatedListAddressesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.AddressAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListAddressesRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.AddressAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AddressAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/addresses" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListAddressesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.AddressAggregatedList( + items={ + 'a':compute.AddressesScopedList(), + 'b':compute.AddressesScopedList(), + 'c':compute.AddressesScopedList(), + }, + next_page_token='abc', + ), + compute.AddressAggregatedList( + items={}, + next_page_token='def', + ), + compute.AddressAggregatedList( + items={ + 'g':compute.AddressesScopedList(), + }, + next_page_token='ghi', + ), + compute.AddressAggregatedList( + items={ + 'h':compute.AddressesScopedList(), + 'i':compute.AddressesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.AddressAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.AddressesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.AddressesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.AddressesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteAddressRequest, + dict, +]) +def test_delete_rest(request_type): + client = AddressesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("address", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AddressesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteAddressRequest.pb(compute.DeleteAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + address='address_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses/{address}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteAddressRequest(), + project='project_value', + region='region_value', + address='address_value', + ) + + +def test_delete_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteAddressRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("address", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.AddressesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteAddressRequest.pb(compute.DeleteAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + address='address_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses/{address}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteAddressRequest(), + project='project_value', + region='region_value', + address='address_value', + ) + + +def test_delete_unary_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetAddressRequest, + dict, +]) +def test_get_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Address( + address='address_value', + address_type='address_type_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + ip_version='ip_version_value', + ipv6_endpoint_type='ipv6_endpoint_type_value', + kind='kind_value', + label_fingerprint='label_fingerprint_value', + name='name_value', + network='network_value', + network_tier='network_tier_value', + prefix_length=1391, + purpose='purpose_value', + region='region_value', + self_link='self_link_value', + status='status_value', + subnetwork='subnetwork_value', + users=['users_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Address) + assert response.address == 'address_value' + assert response.address_type == 'address_type_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.ip_version == 'ip_version_value' + assert response.ipv6_endpoint_type == 'ipv6_endpoint_type_value' + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.network_tier == 'network_tier_value' + assert response.prefix_length == 1391 + assert response.purpose == 'purpose_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + assert response.subnetwork == 'subnetwork_value' + assert response.users == ['users_value'] + + +def test_get_rest_required_fields(request_type=compute.GetAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Address() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("address", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AddressesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetAddressRequest.pb(compute.GetAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Address.to_json(compute.Address()) + + request = compute.GetAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Address() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Address() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + address='address_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses/{address}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetAddressRequest(), + project='project_value', + region='region_value', + address='address_value', + ) + + +def test_get_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertAddressRequest, + dict, +]) +def test_insert_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["address_resource"] = {'address': 'address_value', 'address_type': 'address_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'ip_version': 'ip_version_value', 'ipv6_endpoint_type': 'ipv6_endpoint_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'prefix_length': 1391, 'purpose': 'purpose_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'subnetwork': 'subnetwork_value', 'users': ['users_value1', 'users_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 
'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("addressResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AddressesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertAddressRequest.pb(compute.InsertAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["address_resource"] = {'address': 'address_value', 'address_type': 'address_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'ip_version': 'ip_version_value', 'ipv6_endpoint_type': 'ipv6_endpoint_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'prefix_length': 1391, 'purpose': 'purpose_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'subnetwork': 'subnetwork_value', 'users': ['users_value1', 'users_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + address_resource=compute.Address(address='address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertAddressRequest(), + project='project_value', + region='region_value', + address_resource=compute.Address(address='address_value'), + ) + + +def test_insert_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertAddressRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["address_resource"] = {'address': 'address_value', 'address_type': 'address_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'ip_version': 'ip_version_value', 'ipv6_endpoint_type': 'ipv6_endpoint_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'prefix_length': 1391, 'purpose': 'purpose_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'subnetwork': 'subnetwork_value', 'users': ['users_value1', 'users_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("addressResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AddressesRestInterceptor, "post_insert") as 
post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertAddressRequest.pb(compute.InsertAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["address_resource"] = {'address': 'address_value', 'address_type': 'address_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'ip_version': 'ip_version_value', 'ipv6_endpoint_type': 'ipv6_endpoint_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'prefix_length': 1391, 'purpose': 'purpose_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'subnetwork': 'subnetwork_value', 'users': ['users_value1', 'users_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + address_resource=compute.Address(address='address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertAddressRequest(), + project='project_value', + region='region_value', + address_resource=compute.Address(address='address_value'), + ) + + +def test_insert_unary_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListAddressesRequest, + dict, +]) +def test_list_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AddressList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListAddressesRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.AddressList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.AddressesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListAddressesRequest.pb(compute.ListAddressesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.AddressList.to_json(compute.AddressList()) + + request = compute.ListAddressesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.AddressList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListAddressesRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AddressList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListAddressesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.AddressList( + items=[ + compute.Address(), + compute.Address(), + compute.Address(), + ], + next_page_token='abc', + ), + compute.AddressList( + items=[], + next_page_token='def', + ), + compute.AddressList( + items=[ + compute.Address(), + ], + next_page_token='ghi', + ), + compute.AddressList( + items=[ + compute.Address(), + compute.Address(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.AddressList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Address) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.MoveAddressRequest, + dict, +]) +def test_move_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request_init["region_addresses_move_request_resource"] = {'description': 'description_value', 'destination_address': 'destination_address_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.move(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_move_rest_required_fields(request_type=compute.MoveAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.move._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("address", "project", "region", "regionAddressesMoveRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AddressesRestInterceptor, "post_move") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_move") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveAddressRequest.pb(compute.MoveAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_rest_bad_request(transport: str = 'rest', request_type=compute.MoveAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request_init["region_addresses_move_request_resource"] = {'description': 'description_value', 'destination_address': 'destination_address_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move(request) + + +def test_move_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + address='address_value', + region_addresses_move_request_resource=compute.RegionAddressesMoveRequest(description='description_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses/{address}/move" % client.transport._host, args[1]) + + +def test_move_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move( + compute.MoveAddressRequest(), + project='project_value', + region='region_value', + address='address_value', + region_addresses_move_request_resource=compute.RegionAddressesMoveRequest(description='description_value'), + ) + + +def test_move_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.MoveAddressRequest, + dict, +]) +def test_move_unary_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request_init["region_addresses_move_request_resource"] = {'description': 'description_value', 'destination_address': 'destination_address_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.move_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_move_unary_rest_required_fields(request_type=compute.MoveAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_unary_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.move._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("address", "project", "region", "regionAddressesMoveRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_unary_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.AddressesRestInterceptor, "post_move") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_move") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveAddressRequest.pb(compute.MoveAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_unary_rest_bad_request(transport: str = 'rest', request_type=compute.MoveAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + request_init["region_addresses_move_request_resource"] = {'description': 'description_value', 'destination_address': 'destination_address_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_unary(request) + + +def test_move_unary_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'address': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + address='address_value', + region_addresses_move_request_resource=compute.RegionAddressesMoveRequest(description='description_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses/{address}/move" % client.transport._host, args[1]) + + +def test_move_unary_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.move_unary( + compute.MoveAddressRequest(), + project='project_value', + region='region_value', + address='address_value', + region_addresses_move_request_resource=compute.RegionAddressesMoveRequest(description='description_value'), + ) + + +def test_move_unary_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsAddressRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AddressesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsAddressRequest.pb(compute.SetLabelsAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsAddressRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsAddressRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsAddressRequest): + transport_class = transports.AddressesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.AddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AddressesRestInterceptor(), + ) + client = AddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.AddressesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.AddressesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsAddressRequest.pb(compute.SetLabelsAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsAddressRequest): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/addresses/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsAddressRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AddressesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AddressesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AddressesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AddressesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AddressesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.AddressesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = AddressesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_addresses_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AddressesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_addresses_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.addresses.transports.AddressesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.AddressesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'move', + 'set_labels', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_addresses_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.addresses.transports.AddressesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AddressesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_addresses_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.addresses.transports.AddressesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AddressesTransport() + adc.assert_called_once() + + +def test_addresses_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AddressesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_addresses_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.AddressesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_addresses_host_no_port(transport_name): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_addresses_host_with_port(transport_name): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_addresses_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = 
AddressesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AddressesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.move._session + session2 = client2.transport.move._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = AddressesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = AddressesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AddressesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = AddressesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = AddressesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AddressesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = AddressesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = AddressesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AddressesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = AddressesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = AddressesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AddressesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = AddressesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = AddressesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AddressesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.AddressesTransport, '_prep_wrapped_messages') as prep: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.AddressesTransport, '_prep_wrapped_messages') as prep: + transport_class = AddressesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
        # Entering/leaving the client context manager must delegate close()
        # to the underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()

@pytest.mark.parametrize("client_class,transport_class", [
    (AddressesClient, transports.AddressesRestTransport),
])
def test_api_key_credentials(client_class, transport_class):
    # An api_key supplied via client options must be exchanged for API key
    # credentials and forwarded verbatim to the transport constructor.
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_autoscalers.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_autoscalers.py
new file mode 100644
index 000000000..39517df22
--- /dev/null
+++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_autoscalers.py
@@ -0,0 +1,3938 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
# NOTE(review): json_format is imported a second time here — a harmless
# duplicate emitted by the generator; left in place to match generated output.
from google.protobuf import json_format

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.autoscalers import AutoscalersClient
from google.cloud.compute_v1.services.autoscalers import pagers
from google.cloud.compute_v1.services.autoscalers import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    # Stand-in client certificate provider used by the mTLS tests below.
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AutoscalersClient._get_default_mtls_endpoint(None) is None + assert AutoscalersClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert AutoscalersClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert AutoscalersClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert AutoscalersClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert AutoscalersClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (AutoscalersClient, "rest"), +]) +def test_autoscalers_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.AutoscalersRestTransport, "rest"), +]) +def test_autoscalers_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) 
as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (AutoscalersClient, "rest"), +]) +def test_autoscalers_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_autoscalers_client_get_transport_class(): + transport = AutoscalersClient.get_transport_class() + available_transports = [ + transports.AutoscalersRestTransport, + ] + assert transport in available_transports + + transport = AutoscalersClient.get_transport_class("rest") + assert transport == transports.AutoscalersRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AutoscalersClient, transports.AutoscalersRestTransport, "rest"), +]) +@mock.patch.object(AutoscalersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalersClient)) +def 
test_autoscalers_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AutoscalersClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AutoscalersClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (AutoscalersClient, transports.AutoscalersRestTransport, "rest", "true"), + (AutoscalersClient, transports.AutoscalersRestTransport, "rest", "false"), +]) +@mock.patch.object(AutoscalersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalersClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_autoscalers_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # NOTE(review): `client` here is left over from the previous
                    # case; DEFAULT_ENDPOINT/DEFAULT_MTLS_ENDPOINT are class
                    # attributes, so the read is safe — generated quirk.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )


@pytest.mark.parametrize("client_class", [
    AutoscalersClient
])
@mock.patch.object(AutoscalersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalersClient))
def test_autoscalers_client_get_mtls_endpoint_and_cert_source(client_class):
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (AutoscalersClient, transports.AutoscalersRestTransport, "rest"),
])
def test_autoscalers_client_client_options_scopes(client_class, transport_class, transport_name):
    # Check the case scopes are provided.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (AutoscalersClient, transports.AutoscalersRestTransport, "rest", None),
])
def test_autoscalers_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )


@pytest.mark.parametrize("request_type", [
    compute.AggregatedListAutoscalersRequest,
    dict,
])
def test_aggregated_list_rest(request_type):
    client = AutoscalersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.AutoscalerAggregatedList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
            unreachables=['unreachables_value'],
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.AutoscalerAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.aggregated_list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AggregatedListPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'
    assert response.unreachables == ['unreachables_value']


def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListAutoscalersRequest):
    transport_class = transports.AutoscalersRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = AutoscalersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.AutoscalerAggregatedList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.AutoscalerAggregatedList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.aggregated_list(request)

            # No required field ends up in the query string for this method.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_aggregated_list_rest_unset_required_fields():
    # NOTE(review): the credentials *class* (not an instance) is passed here —
    # a generator quirk; the transport accepts it for this call path.
    transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.aggregated_list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_aggregated_list_rest_interceptors(null_interceptor):
    transport = transports.AutoscalersRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(),
    )
    client = AutoscalersClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.AutoscalersRestInterceptor, "post_aggregated_list") as post, \
         mock.patch.object(transports.AutoscalersRestInterceptor, "pre_aggregated_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AggregatedListAutoscalersRequest.pb(compute.AggregatedListAutoscalersRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.AutoscalerAggregatedList.to_json(compute.AutoscalerAggregatedList())

        request = compute.AggregatedListAutoscalersRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.AutoscalerAggregatedList()

        client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        # Both the pre- and post-call interceptor hooks must have fired.
        pre.assert_called_once()
        post.assert_called_once()


def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListAutoscalersRequest):
    client = AutoscalersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.aggregated_list(request)


def test_aggregated_list_rest_flattened():
    client = AutoscalersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.AutoscalerAggregatedList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.AutoscalerAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.aggregated_list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/autoscalers" % client.transport._host, args[1])


def test_aggregated_list_rest_flattened_error(transport: str = 'rest'):
    client = AutoscalersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.aggregated_list(
            compute.AggregatedListAutoscalersRequest(),
            project='project_value',
        )


def test_aggregated_list_rest_pager(transport: str = 'rest'):
    client = AutoscalersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.AutoscalerAggregatedList(
                items={
                    'a': compute.AutoscalersScopedList(),
                    'b': compute.AutoscalersScopedList(),
                    'c': compute.AutoscalersScopedList(),
                },
                next_page_token='abc',
            ),
            compute.AutoscalerAggregatedList(
                items={},
                next_page_token='def',
            ),
            compute.AutoscalerAggregatedList(
                items={
                    'g': compute.AutoscalersScopedList(),
                },
                next_page_token='ghi',
            ),
            compute.AutoscalerAggregatedList(
                items={
                    'h': compute.AutoscalersScopedList(),
                    'i': compute.AutoscalersScopedList(),
                },
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.AutoscalerAggregatedList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1'}

        pager = client.aggregated_list(request=sample_request)

        # get() only sees the current (first) page before iteration.
        assert isinstance(pager.get('a'), compute.AutoscalersScopedList)
        assert pager.get('h') is None

        results = list(pager)
        assert len(results) == 6
        assert all(
            isinstance(i, tuple)
            for i in results)
        for result in results:
            assert isinstance(result, tuple)
            assert tuple(type(t) for t in result) == (str, compute.AutoscalersScopedList)

        # After full iteration the pager has advanced to the last page.
        assert pager.get('a') is None
        assert isinstance(pager.get('h'), compute.AutoscalersScopedList)

        pages = list(client.aggregated_list(request=sample_request).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [
    compute.DeleteAutoscalerRequest,
    dict,
])
def test_delete_rest(request_type):
    client = AutoscalersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_delete_rest_required_fields(request_type=compute.DeleteAutoscalerRequest):
    transport_class = transports.AutoscalersRestTransport

    request_init = {}
    request_init["autoscaler"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["autoscaler"] = 'autoscaler_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "autoscaler" in jsonified_request
    assert jsonified_request["autoscaler"] == 'autoscaler_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = AutoscalersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("autoscaler", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteAutoscalerRequest.pb(compute.DeleteAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler='autoscaler_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler='autoscaler_value', + ) + + +def test_delete_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteAutoscalerRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteAutoscalerRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["autoscaler"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["autoscaler"] = 'autoscaler_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "autoscaler" in jsonified_request + assert jsonified_request["autoscaler"] == 'autoscaler_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("autoscaler", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteAutoscalerRequest.pb(compute.DeleteAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request 
= PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler='autoscaler_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler='autoscaler_value', + ) + + +def test_delete_unary_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetAutoscalerRequest, + dict, +]) +def test_get_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Autoscaler( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + recommended_size=1693, + region='region_value', + self_link='self_link_value', + status='status_value', + target='target_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Autoscaler) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.recommended_size == 1693 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + assert response.target == 'target_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetAutoscalerRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["autoscaler"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["autoscaler"] = 'autoscaler_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "autoscaler" in jsonified_request + assert jsonified_request["autoscaler"] == 'autoscaler_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' 
+ + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Autoscaler() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("autoscaler", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = 
AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetAutoscalerRequest.pb(compute.GetAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Autoscaler.to_json(compute.Autoscaler()) + + request = compute.GetAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Autoscaler() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Autoscaler() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'autoscaler': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler='autoscaler_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler='autoscaler_value', + ) + + +def test_get_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertAutoscalerRequest, + dict, +]) +def test_insert_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertAutoscalerRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 
'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("autoscalerResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertAutoscalerRequest.pb(compute.InsertAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_insert_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertAutoscalerRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 
'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertAutoscalerRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("autoscalerResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AutoscalersRestInterceptor, 
"post_insert") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertAutoscalerRequest.pb(compute.InsertAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 
'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_insert_unary_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListAutoscalersRequest, + dict, +]) +def test_list_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AutoscalerList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListAutoscalersRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.AutoscalerList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.AutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListAutoscalersRequest.pb(compute.ListAutoscalersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.AutoscalerList.to_json(compute.AutoscalerList()) + + request = compute.ListAutoscalersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.AutoscalerList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListAutoscalersRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AutoscalerList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListAutoscalersRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.AutoscalerList( + items=[ + compute.Autoscaler(), + compute.Autoscaler(), + compute.Autoscaler(), + ], + next_page_token='abc', + ), + compute.AutoscalerList( + items=[], + next_page_token='def', + ), + compute.AutoscalerList( + items=[ + compute.Autoscaler(), + ], + next_page_token='ghi', + ), + compute.AutoscalerList( + items=[ + compute.Autoscaler(), + compute.Autoscaler(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.AutoscalerList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Autoscaler) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchAutoscalerRequest, + dict, +]) +def test_patch_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 
'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchAutoscalerRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 
'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("autoscaler", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autoscaler", "requestId", )) & set(("autoscalerResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchAutoscalerRequest.pb(compute.PatchAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_patch_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchAutoscalerRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 
'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchAutoscalerRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("autoscaler", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autoscaler", "requestId", )) & set(("autoscalerResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.AutoscalersRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchAutoscalerRequest.pb(compute.PatchAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 
'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_patch_unary_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateAutoscalerRequest, + dict, +]) +def test_update_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateAutoscalerRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 
'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("autoscaler", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autoscaler", "requestId", )) & set(("autoscalerResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateAutoscalerRequest.pb(compute.UpdateAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + 
# Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update( + compute.UpdateAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_update_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateAutoscalerRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 
'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateAutoscalerRequest): + transport_class = transports.AutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("autoscaler", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.AutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autoscaler", "requestId", )) & set(("autoscalerResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AutoscalersRestInterceptor(), + ) + client = AutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.AutoscalersRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.AutoscalersRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateAutoscalerRequest.pb(compute.UpdateAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateAutoscalerRequest): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 
'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_unary( + compute.UpdateAutoscalerRequest(), + project='project_value', + zone='zone_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_update_unary_rest_error(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutoscalersClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AutoscalersClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AutoscalersClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutoscalersClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AutoscalersClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.AutoscalersRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = AutoscalersClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_autoscalers_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AutoscalersTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_autoscalers_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.autoscalers.transports.AutoscalersTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.AutoscalersTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_autoscalers_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.autoscalers.transports.AutoscalersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutoscalersTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_autoscalers_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.autoscalers.transports.AutoscalersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutoscalersTransport() + adc.assert_called_once() + + +def test_autoscalers_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AutoscalersClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_autoscalers_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.AutoscalersRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_autoscalers_host_no_port(transport_name): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_autoscalers_host_with_port(transport_name): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_autoscalers_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = AutoscalersClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AutoscalersClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = AutoscalersClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = AutoscalersClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AutoscalersClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = AutoscalersClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = AutoscalersClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AutoscalersClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = AutoscalersClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = AutoscalersClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AutoscalersClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = AutoscalersClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = AutoscalersClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AutoscalersClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = AutoscalersClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = AutoscalersClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AutoscalersClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.AutoscalersTransport, '_prep_wrapped_messages') as prep: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.AutoscalersTransport, '_prep_wrapped_messages') as prep: + transport_class = AutoscalersClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (AutoscalersClient, transports.AutoscalersRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_backend_buckets.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_backend_buckets.py new file mode 100644 index 000000000..ae769efa2 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_backend_buckets.py @@ -0,0 +1,5275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.backend_buckets import BackendBucketsClient +from google.cloud.compute_v1.services.backend_buckets import pagers +from google.cloud.compute_v1.services.backend_buckets import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
# ---------------------------------------------------------------------------
# BackendBuckets REST client tests.
#
# NOTE(review): this span arrived as a collapsed unified diff of generated
# GAPIC test code; it is reconstructed here as regular Python.  The literal
# blobs the generator repeats verbatim (the fully-populated compute.Operation,
# its field assertions, the expected transport-constructor kwargs, and the
# required-fields / interceptor / bad-request / flattened scaffolding shared
# by the `add_signed_url_key` and `add_signed_url_key_unary` variants) are
# factored into private module helpers.  Every public test function keeps its
# name and observable behavior.
# ---------------------------------------------------------------------------


def modify_default_endpoint(client):
    """Return a non-localhost endpoint for clients whose default points at localhost.

    Applied via ``mock.patch.object(..., "DEFAULT_ENDPOINT", ...)`` below so the
    endpoint-autoswitch logic is exercised deterministically in test runs.
    """
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def _make_full_operation():
    """Build a compute.Operation with every scalar field populated."""
    return compute.Operation(
        client_operation_id='client_operation_id_value',
        creation_timestamp='creation_timestamp_value',
        description='description_value',
        end_time='end_time_value',
        http_error_message='http_error_message_value',
        http_error_status_code=2374,
        id=205,
        insert_time='insert_time_value',
        kind='kind_value',
        name='name_value',
        operation_group_id='operation_group_id_value',
        operation_type='operation_type_value',
        progress=885,
        region='region_value',
        self_link='self_link_value',
        start_time='start_time_value',
        status=compute.Operation.Status.DONE,
        status_message='status_message_value',
        target_id=947,
        target_link='target_link_value',
        user='user_value',
        zone='zone_value',
    )


def _assert_operation_fields(response):
    """Assert that *response* carries every field set by :func:`_make_full_operation`."""
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def _stub_operation_response(req, return_value):
    """Wire the mocked ``session.request`` *req* to return *return_value* as 200/JSON."""
    response_value = Response()
    response_value.status_code = 200
    pb_return_value = compute.Operation.pb(return_value)
    json_return_value = json_format.MessageToJson(pb_return_value)
    response_value._content = json_return_value.encode('UTF-8')
    req.return_value = response_value


def _assert_transport_init(patched, **overrides):
    """Assert the transport ctor was called with the generated defaults plus *overrides*."""
    expected = dict(
        credentials=None,
        credentials_file=None,
        host=None,
        scopes=None,
        client_cert_source_for_mtls=None,
        quota_project_id=None,
        client_info=transports.base.DEFAULT_CLIENT_INFO,
        always_use_jwt_access=True,
        api_audience=None,
    )
    expected.update(overrides)
    patched.assert_called_once_with(**expected)


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint rewrites *.googleapis.com hosts, passes others through."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert BackendBucketsClient._get_default_mtls_endpoint(None) is None
    assert BackendBucketsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert BackendBucketsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert BackendBucketsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert BackendBucketsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert BackendBucketsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (BackendBucketsClient, "rest"),
])
def test_backend_buckets_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info builds a client around the factory's credentials."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.BackendBucketsRestTransport, "rest"),
])
def test_backend_buckets_client_service_account_always_use_jwt(transport_class, transport_name):
    """The transport opts service-account creds into self-signed JWTs only when asked."""
    with mock.patch.object(service_account.Credentials,
                           'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials,
                           'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (BackendBucketsClient, "rest"),
])
def test_backend_buckets_client_from_service_account_file(client_class, transport_name):
    """Both from_service_account_file and its _json alias use the factory's creds."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_backend_buckets_client_get_transport_class():
    """get_transport_class resolves both the default and the named REST transport."""
    transport = BackendBucketsClient.get_transport_class()
    available_transports = [
        transports.BackendBucketsRestTransport,
    ]
    assert transport in available_transports

    transport = BackendBucketsClient.get_transport_class("rest")
    assert transport == transports.BackendBucketsRestTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (BackendBucketsClient, transports.BackendBucketsRestTransport, "rest"),
])
@mock.patch.object(BackendBucketsClient, "DEFAULT_ENDPOINT",
                   modify_default_endpoint(BackendBucketsClient))
def test_backend_buckets_client_client_options(client_class, transport_class, transport_name):
    """Exercise every client_options / env-var combination the constructor honors."""
    # A ready-made transport instance must be used as-is (no transport lookup).
    with mock.patch.object(BackendBucketsClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # A transport name (str) must trigger a transport-class lookup.
    with mock.patch.object(BackendBucketsClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # An explicit api_endpoint wins over everything else.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        _assert_transport_init(patched, host="squid.clam.whelk")

    # GOOGLE_API_USE_MTLS_ENDPOINT == "never" -> plain endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            _assert_transport_init(patched, host=client.DEFAULT_ENDPOINT)

    # GOOGLE_API_USE_MTLS_ENDPOINT == "always" -> mTLS endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            _assert_transport_init(patched, host=client.DEFAULT_MTLS_ENDPOINT)

    # Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value -> MutualTLSChannelError.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE value -> ValueError.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # quota_project_id is forwarded to the transport.
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        _assert_transport_init(patched, host=client.DEFAULT_ENDPOINT,
                               quota_project_id="octopus")

    # api_audience is forwarded to the transport.
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        _assert_transport_init(patched, host=client.DEFAULT_ENDPOINT,
                               api_audience="https://language.googleapis.com")


@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (BackendBucketsClient, transports.BackendBucketsRestTransport, "rest", "true"),
    (BackendBucketsClient, transports.BackendBucketsRestTransport, "rest", "false"),
])
@mock.patch.object(BackendBucketsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BackendBucketsClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_backend_buckets_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """The endpoint autoswitches to mTLS only when a client cert is available
    AND GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    """
    # Case 1: client_cert_source passed explicitly via client options.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            _assert_transport_init(patched, host=expected_host,
                                   client_cert_source_for_mtls=expected_client_cert_source)

    # Case 2: the client cert comes from ADC.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # NOTE: `client` is still the instance from case 1 here; only
                    # its class-level DEFAULT_* endpoints are read, so the order
                    # is safe (quirk preserved from the generated original).
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    _assert_transport_init(patched, host=expected_host,
                                           client_cert_source_for_mtls=expected_client_cert_source)

    # Case 3: neither an explicit nor an ADC client cert is available.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                _assert_transport_init(patched, host=client.DEFAULT_ENDPOINT)


@pytest.mark.parametrize("client_class", [
    BackendBucketsClient
])
@mock.patch.object(BackendBucketsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BackendBucketsClient))
def test_backend_buckets_client_get_mtls_endpoint_and_cert_source(client_class):
    """get_mtls_endpoint_and_cert_source honors both env vars and client options."""
    mock_client_cert_source = mock.Mock()

    # GOOGLE_API_USE_CLIENT_CERTIFICATE == "true": the provided cert source is used.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # GOOGLE_API_USE_CLIENT_CERTIFICATE == "false": the cert source is dropped.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # GOOGLE_API_USE_MTLS_ENDPOINT == "never": plain endpoint, no cert.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # GOOGLE_API_USE_MTLS_ENDPOINT == "always": mTLS endpoint, no cert.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # "auto" with no default cert available: plain endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # "auto" with a default cert available: mTLS endpoint + that cert source.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (BackendBucketsClient, transports.BackendBucketsRestTransport, "rest"),
])
def test_backend_buckets_client_client_options_scopes(client_class, transport_class, transport_name):
    """Scopes given in client options are forwarded to the transport."""
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        _assert_transport_init(patched, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"])


@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (BackendBucketsClient, transports.BackendBucketsRestTransport, "rest", None),
])
def test_backend_buckets_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """A credentials file given in client options is forwarded to the transport."""
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        _assert_transport_init(patched, host=client.DEFAULT_ENDPOINT,
                               credentials_file="credentials.json")


# -- add_signed_url_key shared scaffolding ----------------------------------

def _exercise_add_signed_url_key_required_fields(request_type, method_name):
    """Shared body of the rest / rest-unary required-fields tests."""
    transport_class = transports.BackendBucketsRestTransport

    request_init = {}
    request_init["backend_bucket"] = ""
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped
    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_signed_url_key._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["backendBucket"] = 'backend_bucket_value'
    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_signed_url_key._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "backendBucket" in jsonified_request
    assert jsonified_request["backendBucket"] == 'backend_bucket_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    return_value = compute.Operation()
    with mock.patch.object(Session, 'request') as req:
        # transcode() is mocked because the real version rejects the
        # default-valued required fields used here.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body forces all request
            # fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            _stub_operation_response(req, return_value)

            getattr(client, method_name)(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def _exercise_add_signed_url_key_interceptors(null_interceptor, method_name):
    """Shared body of the rest / rest-unary interceptor tests."""
    transport = transports.BackendBucketsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(),
    )
    client = BackendBucketsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.BackendBucketsRestInterceptor, "post_add_signed_url_key") as post, \
         mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_add_signed_url_key") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AddSignedUrlKeyBackendBucketRequest.pb(compute.AddSignedUrlKeyBackendBucketRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.AddSignedUrlKeyBackendBucketRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        getattr(client, method_name)(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def _exercise_add_signed_url_key_bad_request(transport, request_type, method_name):
    """Shared body of the rest / rest-unary bad-request tests."""
    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'backend_bucket': 'sample2'}
    request_init["signed_url_key_resource"] = {'key_name': 'key_name_value', 'key_value': 'key_value_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        getattr(client, method_name)(request)


def _exercise_add_signed_url_key_flattened(method_name):
    """Shared body of the rest / rest-unary flattened-args tests."""
    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), 'request') as req:
        return_value = compute.Operation()

        # arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'}

        # a truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            backend_bucket='backend_bucket_value',
            signed_url_key_resource=compute.SignedUrlKey(key_name='key_name_value'),
        )
        mock_args.update(sample_request)
        _stub_operation_response(req, return_value)

        getattr(client, method_name)(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/addSignedUrlKey" % client.transport._host, args[1])


def _exercise_add_signed_url_key_flattened_error(transport, method_name):
    """Shared body of the rest / rest-unary flattened-error tests."""
    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Passing both a request object and flattened fields is an error.
    with pytest.raises(ValueError):
        getattr(client, method_name)(
            compute.AddSignedUrlKeyBackendBucketRequest(),
            project='project_value',
            backend_bucket='backend_bucket_value',
            signed_url_key_resource=compute.SignedUrlKey(key_name='key_name_value'),
        )


# -- add_signed_url_key (extended-operation flavor) -------------------------

@pytest.mark.parametrize("request_type", [
    compute.AddSignedUrlKeyBackendBucketRequest,
    dict,
])
def test_add_signed_url_key_rest(request_type):
    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'backend_bucket': 'sample2'}
    request_init["signed_url_key_resource"] = {'key_name': 'key_name_value', 'key_value': 'key_value_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        _stub_operation_response(req, _make_full_operation())
        response = client.add_signed_url_key(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    _assert_operation_fields(response)


def test_add_signed_url_key_rest_required_fields(request_type=compute.AddSignedUrlKeyBackendBucketRequest):
    _exercise_add_signed_url_key_required_fields(request_type, 'add_signed_url_key')


def test_add_signed_url_key_rest_unset_required_fields():
    transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.add_signed_url_key._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "project", "signedUrlKeyResource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_add_signed_url_key_rest_interceptors(null_interceptor):
    _exercise_add_signed_url_key_interceptors(null_interceptor, 'add_signed_url_key')


def test_add_signed_url_key_rest_bad_request(transport: str = 'rest', request_type=compute.AddSignedUrlKeyBackendBucketRequest):
    _exercise_add_signed_url_key_bad_request(transport, request_type, 'add_signed_url_key')


def test_add_signed_url_key_rest_flattened():
    _exercise_add_signed_url_key_flattened('add_signed_url_key')


def test_add_signed_url_key_rest_flattened_error(transport: str = 'rest'):
    _exercise_add_signed_url_key_flattened_error(transport, 'add_signed_url_key')


def test_add_signed_url_key_rest_error():
    # Smoke test: client construction with the rest transport succeeds.
    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


# -- add_signed_url_key_unary (raw-Operation flavor) ------------------------

@pytest.mark.parametrize("request_type", [
    compute.AddSignedUrlKeyBackendBucketRequest,
    dict,
])
def test_add_signed_url_key_unary_rest(request_type):
    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'backend_bucket': 'sample2'}
    request_init["signed_url_key_resource"] = {'key_name': 'key_name_value', 'key_value': 'key_value_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        _stub_operation_response(req, _make_full_operation())
        response = client.add_signed_url_key_unary(request)

    # The unary variant returns the raw Operation, not an ExtendedOperation.
    assert isinstance(response, compute.Operation)


def test_add_signed_url_key_unary_rest_required_fields(request_type=compute.AddSignedUrlKeyBackendBucketRequest):
    _exercise_add_signed_url_key_required_fields(request_type, 'add_signed_url_key_unary')


def test_add_signed_url_key_unary_rest_unset_required_fields():
    transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.add_signed_url_key._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "project", "signedUrlKeyResource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_add_signed_url_key_unary_rest_interceptors(null_interceptor):
    _exercise_add_signed_url_key_interceptors(null_interceptor, 'add_signed_url_key_unary')


def test_add_signed_url_key_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddSignedUrlKeyBackendBucketRequest):
    _exercise_add_signed_url_key_bad_request(transport, request_type, 'add_signed_url_key_unary')


def test_add_signed_url_key_unary_rest_flattened():
    _exercise_add_signed_url_key_flattened('add_signed_url_key_unary')


def test_add_signed_url_key_unary_rest_flattened_error(transport: str = 'rest'):
    _exercise_add_signed_url_key_flattened_error(transport, 'add_signed_url_key_unary')


def test_add_signed_url_key_unary_rest_error():
    # Smoke test: client construction with the rest transport succeeds.
    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.DeleteBackendBucketRequest,
    dict,
])
def test_delete_rest(request_type):
    # NOTE(review): the tail of this function lay beyond the reviewed span;
    # it is completed in the standard generated pattern (cf.
    # test_add_signed_url_key_rest) — confirm against the generator output.
    client = BackendBucketsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'backend_bucket': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        _stub_operation_response(req, _make_full_operation())
        response = client.delete(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    _assert_operation_fields(response)
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = 'backend_bucket_value' 
+ jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteBackendBucketRequest.pb(compute.DeleteBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + ) + + +def test_delete_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteBackendBucketRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = 'backend_bucket_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteBackendBucketRequest.pb(compute.DeleteBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + ) + + +def test_delete_unary_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSignedUrlKeyBackendBucketRequest, + dict, +]) +def test_delete_signed_url_key_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_signed_url_key(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_signed_url_key_rest_required_fields(request_type=compute.DeleteSignedUrlKeyBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["key_name"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "keyName" not in jsonified_request + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_signed_url_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "keyName" in jsonified_request + assert jsonified_request["keyName"] == request_init["key_name"] + + jsonified_request["backendBucket"] = 'backend_bucket_value' + jsonified_request["keyName"] = 'key_name_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_signed_url_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("key_name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "keyName" in jsonified_request + assert jsonified_request["keyName"] == 'key_name_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_signed_url_key(request) + + expected_params = [ + ( + "keyName", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_signed_url_key_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_signed_url_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("keyName", "requestId", )) & set(("backendBucket", "keyName", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_signed_url_key_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_delete_signed_url_key") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_delete_signed_url_key") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSignedUrlKeyBackendBucketRequest.pb(compute.DeleteSignedUrlKeyBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSignedUrlKeyBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_signed_url_key(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_signed_url_key_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSignedUrlKeyBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_signed_url_key(request) + + +def test_delete_signed_url_key_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + key_name='key_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_signed_url_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/deleteSignedUrlKey" % client.transport._host, args[1]) + + +def test_delete_signed_url_key_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_signed_url_key( + compute.DeleteSignedUrlKeyBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + key_name='key_name_value', + ) + + +def test_delete_signed_url_key_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSignedUrlKeyBackendBucketRequest, + dict, +]) +def test_delete_signed_url_key_unary_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_signed_url_key_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_signed_url_key_unary_rest_required_fields(request_type=compute.DeleteSignedUrlKeyBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["key_name"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "keyName" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_signed_url_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "keyName" in jsonified_request + assert jsonified_request["keyName"] == request_init["key_name"] + + jsonified_request["backendBucket"] = 'backend_bucket_value' + jsonified_request["keyName"] = 'key_name_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_signed_url_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("key_name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "keyName" in jsonified_request + assert jsonified_request["keyName"] == 'key_name_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_signed_url_key_unary(request) + + expected_params = [ + ( + "keyName", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_signed_url_key_unary_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_signed_url_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("keyName", "requestId", )) & set(("backendBucket", "keyName", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_signed_url_key_unary_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_delete_signed_url_key") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_delete_signed_url_key") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSignedUrlKeyBackendBucketRequest.pb(compute.DeleteSignedUrlKeyBackendBucketRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSignedUrlKeyBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_signed_url_key_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_signed_url_key_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSignedUrlKeyBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_signed_url_key_unary(request) + + +def test_delete_signed_url_key_unary_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + key_name='key_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_signed_url_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/deleteSignedUrlKey" % client.transport._host, args[1]) + + +def test_delete_signed_url_key_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_signed_url_key_unary( + compute.DeleteSignedUrlKeyBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + key_name='key_name_value', + ) + + +def test_delete_signed_url_key_unary_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetBackendBucketRequest, + dict, +]) +def test_get_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucket( + bucket_name='bucket_name_value', + compression_mode='compression_mode_value', + creation_timestamp='creation_timestamp_value', + custom_response_headers=['custom_response_headers_value'], + description='description_value', + edge_security_policy='edge_security_policy_value', + enable_cdn=True, + id=205, + kind='kind_value', + name='name_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.BackendBucket) + assert response.bucket_name == 'bucket_name_value' + assert response.compression_mode == 'compression_mode_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.custom_response_headers == ['custom_response_headers_value'] + assert response.description == 'description_value' + assert response.edge_security_policy == 'edge_security_policy_value' + assert response.enable_cdn is True + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = 'backend_bucket_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucket() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("backendBucket", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) 
+ with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetBackendBucketRequest.pb(compute.GetBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.BackendBucket.to_json(compute.BackendBucket()) + + request = compute.GetBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendBucket() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucket() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + ) + + +def test_get_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertBackendBucketRequest, + dict, +]) +def test_insert_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucketResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertBackendBucketRequest.pb(compute.InsertBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertBackendBucketRequest(), + project='project_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + + +def test_insert_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertBackendBucketRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 
'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucketResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertBackendBucketRequest.pb(compute.InsertBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 
'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertBackendBucketRequest(), + project='project_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + + +def test_insert_unary_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListBackendBucketsRequest, + dict, +]) +def test_list_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucketList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListBackendBucketsRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucketList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListBackendBucketsRequest.pb(compute.ListBackendBucketsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.BackendBucketList.to_json(compute.BackendBucketList()) + + request = compute.ListBackendBucketsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendBucketList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListBackendBucketsRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucketList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListBackendBucketsRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + compute.BackendBucket(), + compute.BackendBucket(), + ], + next_page_token='abc', + ), + compute.BackendBucketList( + items=[], + next_page_token='def', + ), + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + ], + next_page_token='ghi', + ), + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + compute.BackendBucket(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.BackendBucketList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.BackendBucket) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchBackendBucketRequest, + dict, +]) +def test_patch_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 
'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = 'backend_bucket_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "backendBucketResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchBackendBucketRequest.pb(compute.PatchBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + + +def test_patch_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchBackendBucketRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 
'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = 'backend_bucket_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "backendBucketResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.BackendBucketsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchBackendBucketRequest.pb(compute.PatchBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': 
['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + + +def test_patch_unary_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetEdgeSecurityPolicyBackendBucketRequest, + dict, +]) +def test_set_edge_security_policy_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_edge_security_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_edge_security_policy_rest_required_fields(request_type=compute.SetEdgeSecurityPolicyBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_edge_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["backendBucket"] = 'backend_bucket_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_edge_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_edge_security_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_edge_security_policy_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_edge_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "project", "securityPolicyReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_edge_security_policy_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_set_edge_security_policy") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_set_edge_security_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetEdgeSecurityPolicyBackendBucketRequest.pb(compute.SetEdgeSecurityPolicyBackendBucketRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetEdgeSecurityPolicyBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_edge_security_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_edge_security_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetEdgeSecurityPolicyBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_edge_security_policy(request) + + +def test_set_edge_security_policy_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_edge_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/setEdgeSecurityPolicy" % client.transport._host, args[1]) + + +def test_set_edge_security_policy_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_edge_security_policy( + compute.SetEdgeSecurityPolicyBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + + +def test_set_edge_security_policy_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetEdgeSecurityPolicyBackendBucketRequest, + dict, +]) +def test_set_edge_security_policy_unary_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_edge_security_policy_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_edge_security_policy_unary_rest_required_fields(request_type=compute.SetEdgeSecurityPolicyBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_edge_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = 'backend_bucket_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_edge_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_edge_security_policy_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_edge_security_policy_unary_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_edge_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "project", "securityPolicyReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_edge_security_policy_unary_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_set_edge_security_policy") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_set_edge_security_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetEdgeSecurityPolicyBackendBucketRequest.pb(compute.SetEdgeSecurityPolicyBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetEdgeSecurityPolicyBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_edge_security_policy_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_edge_security_policy_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetEdgeSecurityPolicyBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_edge_security_policy_unary(request) + + +def test_set_edge_security_policy_unary_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_edge_security_policy_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/setEdgeSecurityPolicy" % client.transport._host, args[1]) + + +def test_set_edge_security_policy_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_edge_security_policy_unary( + compute.SetEdgeSecurityPolicyBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + + +def test_set_edge_security_policy_unary_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateBackendBucketRequest, + dict, +]) +def test_update_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 
'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = 'backend_bucket_value' 
+ jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + + def test_update_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "backendBucketResource", "project", ))) + + + @pytest.mark.parametrize("null_interceptor", [True, False]) + def test_update_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateBackendBucketRequest.pb(compute.UpdateBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response()
+ req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update( + compute.UpdateBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + + +def test_update_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateBackendBucketRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 
'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateBackendBucketRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = 'backend_bucket_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == 'backend_bucket_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + + def test_update_unary_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendBucket", "backendBucketResource", "project", ))) + + + @pytest.mark.parametrize("null_interceptor", [True, False]) + def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.BackendBucketsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.BackendBucketsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateBackendBucketRequest.pb(compute.UpdateBackendBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateBackendBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateBackendBucketRequest): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_bucket': 'sample2'} + request_init["backend_bucket_resource"] = {'bucket_name': 'bucket_name_value', 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': 
['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'compression_mode': 'compression_mode_value', 'creation_timestamp': 'creation_timestamp_value', 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_cdn': True, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_bucket': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_bucket='backend_bucket_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_unary( + compute.UpdateBackendBucketRequest(), + project='project_value', + backend_bucket='backend_bucket_value', + backend_bucket_resource=compute.BackendBucket(bucket_name='bucket_name_value'), + ) + + +def test_update_unary_rest_error(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackendBucketsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BackendBucketsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BackendBucketsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackendBucketsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BackendBucketsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.BackendBucketsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = BackendBucketsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_backend_buckets_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BackendBucketsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_backend_buckets_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.backend_buckets.transports.BackendBucketsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.BackendBucketsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_signed_url_key', + 'delete', + 'delete_signed_url_key', + 'get', + 'insert', + 'list', + 'patch', + 'set_edge_security_policy', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_backend_buckets_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.backend_buckets.transports.BackendBucketsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BackendBucketsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_backend_buckets_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.backend_buckets.transports.BackendBucketsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BackendBucketsTransport() + adc.assert_called_once() + + +def test_backend_buckets_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BackendBucketsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_backend_buckets_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.BackendBucketsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_backend_buckets_host_no_port(transport_name): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_backend_buckets_host_with_port(transport_name): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_backend_buckets_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = BackendBucketsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BackendBucketsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_signed_url_key._session + session2 = client2.transport.add_signed_url_key._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.delete_signed_url_key._session + session2 = client2.transport.delete_signed_url_key._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_edge_security_policy._session + session2 = client2.transport.set_edge_security_policy._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = BackendBucketsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = BackendBucketsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackendBucketsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = BackendBucketsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = BackendBucketsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BackendBucketsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = BackendBucketsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = BackendBucketsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BackendBucketsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = BackendBucketsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = BackendBucketsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackendBucketsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = BackendBucketsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = BackendBucketsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = BackendBucketsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.BackendBucketsTransport, '_prep_wrapped_messages') as prep: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.BackendBucketsTransport, '_prep_wrapped_messages') as prep: + transport_class = BackendBucketsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls 
underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (BackendBucketsClient, transports.BackendBucketsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_backend_services.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_backend_services.py new file mode 100644 index 000000000..389f51c33 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_backend_services.py @@ -0,0 +1,6916 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import os

# try/except added for compatibility with python < 3.8, where
# unittest.mock lacks some features and the external `mock` package
# is used instead.
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.backend_services import BackendServicesClient
from google.cloud.compute_v1.services.backend_services import pagers
from google.cloud.compute_v1.services.backend_services import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth
# NOTE(review): the original preamble imported `json_format` twice; the
# duplicate `from google.protobuf import json_format` was removed (the
# first occurrence above keeps the name in scope).


def client_cert_source_callback():
    """Dummy client-certificate source used by the mTLS tests.

    Returns:
        tuple[bytes, bytes]: a stand-in (certificate bytes, key bytes)
        pair, so tests can exercise mTLS plumbing without real keys.
    """
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BackendServicesClient._get_default_mtls_endpoint(None) is None + assert BackendServicesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert BackendServicesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert BackendServicesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert BackendServicesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert BackendServicesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (BackendServicesClient, "rest"), +]) +def test_backend_services_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.BackendServicesRestTransport, "rest"), +]) +def test_backend_services_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (BackendServicesClient, "rest"), +]) +def test_backend_services_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_backend_services_client_get_transport_class(): + transport = BackendServicesClient.get_transport_class() + available_transports = [ + transports.BackendServicesRestTransport, + ] + assert transport in available_transports + + transport = BackendServicesClient.get_transport_class("rest") + assert transport == transports.BackendServicesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (BackendServicesClient, transports.BackendServicesRestTransport, "rest"), +]) +@mock.patch.object(BackendServicesClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(BackendServicesClient)) +def test_backend_services_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(BackendServicesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(BackendServicesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (BackendServicesClient, transports.BackendServicesRestTransport, "rest", "true"), + (BackendServicesClient, transports.BackendServicesRestTransport, "rest", "false"), +]) +@mock.patch.object(BackendServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BackendServicesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_backend_services_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + BackendServicesClient +]) +@mock.patch.object(BackendServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BackendServicesClient)) +def test_backend_services_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (BackendServicesClient, transports.BackendServicesRestTransport, "rest"), +]) +def test_backend_services_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (BackendServicesClient, transports.BackendServicesRestTransport, "rest", None), +]) +def test_backend_services_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddSignedUrlKeyBackendServiceRequest, + dict, +]) +def test_add_signed_url_key_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["signed_url_key_resource"] = {'key_name': 'key_name_value', 'key_value': 'key_value_value'} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_signed_url_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_signed_url_key_rest_required_fields(request_type=compute.AddSignedUrlKeyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_signed_url_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_signed_url_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_signed_url_key(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_signed_url_key_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_signed_url_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", "signedUrlKeyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_signed_url_key_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_add_signed_url_key") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_add_signed_url_key") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddSignedUrlKeyBackendServiceRequest.pb(compute.AddSignedUrlKeyBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddSignedUrlKeyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_signed_url_key(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_signed_url_key_rest_bad_request(transport: str = 'rest', request_type=compute.AddSignedUrlKeyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["signed_url_key_resource"] = {'key_name': 'key_name_value', 'key_value': 'key_value_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_signed_url_key(request) + + +def test_add_signed_url_key_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + signed_url_key_resource=compute.SignedUrlKey(key_name='key_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_signed_url_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/addSignedUrlKey" % client.transport._host, args[1]) + + +def test_add_signed_url_key_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_signed_url_key( + compute.AddSignedUrlKeyBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + signed_url_key_resource=compute.SignedUrlKey(key_name='key_name_value'), + ) + + +def test_add_signed_url_key_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddSignedUrlKeyBackendServiceRequest, + dict, +]) +def test_add_signed_url_key_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["signed_url_key_resource"] = {'key_name': 'key_name_value', 'key_value': 'key_value_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_signed_url_key_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_signed_url_key_unary_rest_required_fields(request_type=compute.AddSignedUrlKeyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_signed_url_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_signed_url_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_signed_url_key_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_signed_url_key_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_signed_url_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", "signedUrlKeyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_signed_url_key_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_add_signed_url_key") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_add_signed_url_key") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddSignedUrlKeyBackendServiceRequest.pb(compute.AddSignedUrlKeyBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddSignedUrlKeyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_signed_url_key_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_signed_url_key_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddSignedUrlKeyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["signed_url_key_resource"] = {'key_name': 'key_name_value', 'key_value': 'key_value_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_signed_url_key_unary(request) + + +def test_add_signed_url_key_unary_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + signed_url_key_resource=compute.SignedUrlKey(key_name='key_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_signed_url_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/addSignedUrlKey" % client.transport._host, args[1]) + + +def test_add_signed_url_key_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_signed_url_key_unary( + compute.AddSignedUrlKeyBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + signed_url_key_resource=compute.SignedUrlKey(key_name='key_name_value'), + ) + + +def test_add_signed_url_key_unary_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListBackendServicesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendServiceAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListBackendServicesRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendServiceAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.BackendServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListBackendServicesRequest.pb(compute.AggregatedListBackendServicesRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.BackendServiceAggregatedList.to_json(compute.BackendServiceAggregatedList()) + + request = compute.AggregatedListBackendServicesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendServiceAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListBackendServicesRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendServiceAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/backendServices" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListBackendServicesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendServiceAggregatedList( + items={ + 'a':compute.BackendServicesScopedList(), + 'b':compute.BackendServicesScopedList(), + 'c':compute.BackendServicesScopedList(), + }, + next_page_token='abc', + ), + compute.BackendServiceAggregatedList( + items={}, + next_page_token='def', + ), + compute.BackendServiceAggregatedList( + items={ + 'g':compute.BackendServicesScopedList(), + }, + next_page_token='ghi', + ), + compute.BackendServiceAggregatedList( + items={ + 'h':compute.BackendServicesScopedList(), + 'i':compute.BackendServicesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.BackendServiceAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.BackendServicesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.BackendServicesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.BackendServicesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteBackendServiceRequest, + dict, +]) +def 
test_delete_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 
'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteBackendServiceRequest.pb(compute.DeleteBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + ) + + +def test_delete_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteBackendServiceRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.BackendServicesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteBackendServiceRequest.pb(compute.DeleteBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + ) + + +def test_delete_unary_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSignedUrlKeyBackendServiceRequest, + dict, +]) +def test_delete_signed_url_key_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_signed_url_key(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_signed_url_key_rest_required_fields(request_type=compute.DeleteSignedUrlKeyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["key_name"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "keyName" not in jsonified_request + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_signed_url_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "keyName" in jsonified_request + assert jsonified_request["keyName"] == request_init["key_name"] + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["keyName"] = 'key_name_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_signed_url_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("key_name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "keyName" in jsonified_request + assert jsonified_request["keyName"] == 'key_name_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_signed_url_key(request) + + expected_params = [ + ( + "keyName", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_signed_url_key_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_signed_url_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("keyName", "requestId", )) & set(("backendService", "keyName", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_signed_url_key_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_delete_signed_url_key") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_delete_signed_url_key") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSignedUrlKeyBackendServiceRequest.pb(compute.DeleteSignedUrlKeyBackendServiceRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSignedUrlKeyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_signed_url_key(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_signed_url_key_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSignedUrlKeyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_signed_url_key(request) + + +def test_delete_signed_url_key_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + key_name='key_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_signed_url_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/deleteSignedUrlKey" % client.transport._host, args[1]) + + +def test_delete_signed_url_key_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_signed_url_key( + compute.DeleteSignedUrlKeyBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + key_name='key_name_value', + ) + + +def test_delete_signed_url_key_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSignedUrlKeyBackendServiceRequest, + dict, +]) +def test_delete_signed_url_key_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_signed_url_key_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_signed_url_key_unary_rest_required_fields(request_type=compute.DeleteSignedUrlKeyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["key_name"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "keyName" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_signed_url_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "keyName" in jsonified_request + assert jsonified_request["keyName"] == request_init["key_name"] + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["keyName"] = 'key_name_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_signed_url_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("key_name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "keyName" in jsonified_request + assert jsonified_request["keyName"] == 'key_name_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_signed_url_key_unary(request) + + expected_params = [ + ( + "keyName", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_signed_url_key_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_signed_url_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("keyName", "requestId", )) & set(("backendService", "keyName", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_signed_url_key_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_delete_signed_url_key") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_delete_signed_url_key") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSignedUrlKeyBackendServiceRequest.pb(compute.DeleteSignedUrlKeyBackendServiceRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSignedUrlKeyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_signed_url_key_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_signed_url_key_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSignedUrlKeyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_signed_url_key_unary(request) + + +def test_delete_signed_url_key_unary_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + key_name='key_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_signed_url_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/deleteSignedUrlKey" % client.transport._host, args[1]) + + +def test_delete_signed_url_key_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_signed_url_key_unary( + compute.DeleteSignedUrlKeyBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + key_name='key_name_value', + ) + + +def test_delete_signed_url_key_unary_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetBackendServiceRequest, + dict, +]) +def test_get_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendService( + affinity_cookie_ttl_sec=2432, + compression_mode='compression_mode_value', + creation_timestamp='creation_timestamp_value', + custom_request_headers=['custom_request_headers_value'], + custom_response_headers=['custom_response_headers_value'], + description='description_value', + edge_security_policy='edge_security_policy_value', + enable_c_d_n=True, + fingerprint='fingerprint_value', + health_checks=['health_checks_value'], + id=205, + kind='kind_value', + load_balancing_scheme='load_balancing_scheme_value', + locality_lb_policy='locality_lb_policy_value', + name='name_value', + network='network_value', + port=453, + port_name='port_name_value', + protocol='protocol_value', + region='region_value', + security_policy='security_policy_value', + self_link='self_link_value', + service_bindings=['service_bindings_value'], + session_affinity='session_affinity_value', + timeout_sec=1185, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.BackendService) + assert response.affinity_cookie_ttl_sec == 2432 + assert response.compression_mode == 'compression_mode_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.custom_request_headers == ['custom_request_headers_value'] + assert response.custom_response_headers == ['custom_response_headers_value'] + assert response.description == 'description_value' + assert response.edge_security_policy == 'edge_security_policy_value' + assert response.enable_c_d_n is True + assert response.fingerprint == 'fingerprint_value' + assert response.health_checks == ['health_checks_value'] + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.load_balancing_scheme == 'load_balancing_scheme_value' + assert response.locality_lb_policy == 'locality_lb_policy_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.port == 453 + assert response.port_name == 'port_name_value' + assert response.protocol == 'protocol_value' + assert response.region == 'region_value' + assert response.security_policy == 'security_policy_value' + assert response.self_link == 'self_link_value' + assert response.service_bindings == ['service_bindings_value'] + assert response.session_affinity == 'session_affinity_value' + assert response.timeout_sec == 1185 + + +def test_get_rest_required_fields(request_type=compute.GetBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendService() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("backendService", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetBackendServiceRequest.pb(compute.GetBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.BackendService.to_json(compute.BackendService()) + + request = compute.GetBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendService() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendService() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + ) + + +def test_get_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetHealthBackendServiceRequest, + dict, +]) +def test_get_health_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["resource_group_reference_resource"] = {'group': 'group_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendServiceGroupHealth( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_health(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.BackendServiceGroupHealth) + assert response.kind == 'kind_value' + + +def test_get_health_rest_required_fields(request_type=compute.GetHealthBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendServiceGroupHealth() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_health(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_health_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_health._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("backendService", "project", "resourceGroupReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_health_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.BackendServicesRestInterceptor, "post_get_health") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_get_health") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetHealthBackendServiceRequest.pb(compute.GetHealthBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.BackendServiceGroupHealth.to_json(compute.BackendServiceGroupHealth()) + + request = compute.GetHealthBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendServiceGroupHealth() + + client.get_health(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_health_rest_bad_request(transport: str = 'rest', request_type=compute.GetHealthBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["resource_group_reference_resource"] = {'group': 'group_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_health(request) + + +def test_get_health_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendServiceGroupHealth() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + resource_group_reference_resource=compute.ResourceGroupReference(group='group_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_health(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/getHealth" % client.transport._host, args[1]) + + +def test_get_health_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_health( + compute.GetHealthBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + resource_group_reference_resource=compute.ResourceGroupReference(group='group_value'), + ) + + +def test_get_health_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyBackendServiceRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyBackendServiceRequest.pb(compute.GetIamPolicyBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyBackendServiceRequest(), + project='project_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertBackendServiceRequest, + dict, +]) +def test_insert_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': 
['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': 
{}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendServiceResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertBackendServiceRequest.pb(compute.InsertBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': 
[{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': 
['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertBackendServiceRequest(), + project='project_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_insert_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertBackendServiceRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 
'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 
'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendServiceResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertBackendServiceRequest.pb(compute.InsertBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 
'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 
'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
    # Patch at the Session level so the client's retry/transport stack still runs,
    # then hand back a bare HTTP 400 so the error-mapping path raises BadRequest.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request)


def test_insert_unary_rest_flattened():
    """insert_unary called with flattened keyword args must hit the expected
    ``.../projects/{project}/global/backendServices`` URI exactly once."""
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is the URL the transport requested; validate it against the
        # HTTP rule's path template for this method.
        assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices" % client.transport._host, args[1])


def test_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Supplying both a request object and flattened fields raises ValueError."""
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert_unary(
            compute.InsertBackendServiceRequest(),
            project='project_value',
            backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432),
        )


def test_insert_unary_rest_error():
    """Smoke test: a REST-transport client can be constructed without error."""
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.ListBackendServicesRequest,
    dict,
])
def test_list_rest(request_type):
    """list() round-trip over REST: a mocked BackendServiceList JSON payload is
    deserialized and surfaced through the returned pager's scalar fields.

    Parametrized over both the proto request type and a plain dict, since the
    client accepts either form.
    """
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.BackendServiceList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.BackendServiceList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'


def test_list_rest_required_fields(request_type=compute.ListBackendServicesRequest):
    """Exercise the transport's required-field bookkeeping for list():
    defaulted fields are dropped from the JSON request, required fields with
    real values are preserved, and the final call carries no query params."""
    transport_class = transports.BackendServicesRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.BackendServiceList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.BackendServiceList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list(request)

            # An empty request (all defaults dropped) should produce no params.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_rest_unset_required_fields():
    """The unset-required-fields set for list() is the intersection of its
    optional query params with its required fields (i.e. only ``project``
    matters, and it is not optional)."""
    # NOTE(review): AnonymousCredentials is passed here as the class itself,
    # not an instance (no parentheses) — this matches the generator's output,
    # but confirm it is intentional.
    transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
    """pre_list/post_list interceptor hooks are each invoked exactly once per
    list() call, with and without an interceptor instance installed."""
    transport = transports.BackendServicesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(),
    )
    client = BackendServicesClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.BackendServicesRestInterceptor, "post_list") as post, \
         mock.patch.object(transports.BackendServicesRestInterceptor, "pre_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListBackendServicesRequest.pb(compute.ListBackendServicesRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.BackendServiceList.to_json(compute.BackendServiceList())

        request = compute.ListBackendServicesRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.BackendServiceList()

        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListBackendServicesRequest):
    """A 400 response from the session surfaces as core_exceptions.BadRequest."""
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list(request)


def test_list_rest_flattened():
    """list() called with flattened keyword args must hit the expected URI."""
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.BackendServiceList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.BackendServiceList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is the requested URL; check it against the method's path template.
        assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    """Supplying both a request object and flattened fields raises ValueError."""
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListBackendServicesRequest(),
            project='project_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    """Pagination: four fake pages (3 + 0 + 1 + 2 items) are walked by the
    pager, yielding all six items and per-page next_page_token values."""
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendServiceList( + items=[ + compute.BackendService(), + compute.BackendService(), + compute.BackendService(), + ], + next_page_token='abc', + ), + compute.BackendServiceList( + items=[], + next_page_token='def', + ), + compute.BackendServiceList( + items=[ + compute.BackendService(), + ], + next_page_token='ghi', + ), + compute.BackendServiceList( + items=[ + compute.BackendService(), + compute.BackendService(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.BackendServiceList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.BackendService) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchBackendServiceRequest, + dict, +]) +def test_patch_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 
'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 
'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 
'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_patch_rest_required_fields(request_type=compute.PatchBackendServiceRequest):
    """Exercise the transport's required-field bookkeeping for patch():
    defaulted fields are dropped from the JSON request, the two required
    path params (backendService, project) are preserved once set, and only
    ``request_id`` remains an optional query param."""
    transport_class = transports.BackendServicesRestTransport

    request_init = {}
    request_init["backend_service"] = ""
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["backendService"] = 'backend_service_value'
    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "backendService" in jsonified_request
    assert jsonified_request["backendService"] == 'backend_service_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "backendServiceResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchBackendServiceRequest.pb(compute.PatchBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': 
True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': 
True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
        # Designated return payload for the mocked HTTP call below.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'backend_service': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            backend_service='backend_service_value',
            backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.patch(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is the requested URL; patch addresses a specific backend service.
        assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" % client.transport._host, args[1])


def test_patch_rest_flattened_error(transport: str = 'rest'):
    """Supplying both a request object and flattened fields raises ValueError."""
    client = BackendServicesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.patch( + compute.PatchBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_patch_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchBackendServiceRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 
'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 
'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "backendServiceResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.BackendServicesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchBackendServiceRequest.pb(compute.PatchBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 
'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': 
{'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_patch_unary_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetEdgeSecurityPolicyBackendServiceRequest, + dict, +]) +def test_set_edge_security_policy_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_edge_security_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_edge_security_policy_rest_required_fields(request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_edge_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_edge_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_edge_security_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_edge_security_policy_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_edge_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", "securityPolicyReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_edge_security_policy_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_set_edge_security_policy") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_set_edge_security_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetEdgeSecurityPolicyBackendServiceRequest.pb(compute.SetEdgeSecurityPolicyBackendServiceRequest()) 
+ transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetEdgeSecurityPolicyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_edge_security_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_edge_security_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_edge_security_policy(request) + + +def test_set_edge_security_policy_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_edge_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setEdgeSecurityPolicy" % client.transport._host, args[1]) + + +def test_set_edge_security_policy_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_edge_security_policy( + compute.SetEdgeSecurityPolicyBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + + +def test_set_edge_security_policy_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetEdgeSecurityPolicyBackendServiceRequest, + dict, +]) +def test_set_edge_security_policy_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_edge_security_policy_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_edge_security_policy_unary_rest_required_fields(request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_edge_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_edge_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_edge_security_policy_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_edge_security_policy_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_edge_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", "securityPolicyReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_edge_security_policy_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_set_edge_security_policy") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_set_edge_security_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetEdgeSecurityPolicyBackendServiceRequest.pb(compute.SetEdgeSecurityPolicyBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetEdgeSecurityPolicyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_edge_security_policy_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_edge_security_policy_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_edge_security_policy_unary(request) + + +def test_set_edge_security_policy_unary_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_edge_security_policy_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setEdgeSecurityPolicy" % client.transport._host, args[1]) + + +def test_set_edge_security_policy_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_edge_security_policy_unary( + compute.SetEdgeSecurityPolicyBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + + +def test_set_edge_security_policy_unary_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyBackendServiceRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 
'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetPolicyRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.BackendServicesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyBackendServiceRequest.pb(compute.SetIamPolicyBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 
'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyBackendServiceRequest(), + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSecurityPolicyBackendServiceRequest, + dict, +]) +def test_set_security_policy_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_security_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_security_policy_rest_required_fields(request_type=compute.SetSecurityPolicyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_security_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_security_policy_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", "securityPolicyReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_security_policy_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_set_security_policy") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_set_security_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSecurityPolicyBackendServiceRequest.pb(compute.SetSecurityPolicyBackendServiceRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSecurityPolicyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_security_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_security_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetSecurityPolicyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_security_policy(request) + + +def test_set_security_policy_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_security_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setSecurityPolicy" % client.transport._host, args[1]) + + +def test_set_security_policy_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_security_policy( + compute.SetSecurityPolicyBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + + +def test_set_security_policy_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSecurityPolicyBackendServiceRequest, + dict, +]) +def test_set_security_policy_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_security_policy_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_security_policy_unary_rest_required_fields(request_type=compute.SetSecurityPolicyBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_security_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_security_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_security_policy_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_security_policy_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_security_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", "securityPolicyReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_security_policy_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_set_security_policy") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_set_security_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSecurityPolicyBackendServiceRequest.pb(compute.SetSecurityPolicyBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSecurityPolicyBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_security_policy_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_security_policy_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetSecurityPolicyBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["security_policy_reference_resource"] = {'security_policy': 'security_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_security_policy_unary(request) + + +def test_set_security_policy_unary_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_security_policy_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setSecurityPolicy" % client.transport._host, args[1]) + + +def test_set_security_policy_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_security_policy_unary( + compute.SetSecurityPolicyBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + security_policy_reference_resource=compute.SecurityPolicyReference(security_policy='security_policy_value'), + ) + + +def test_set_security_policy_unary_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateBackendServiceRequest, + dict, +]) +def test_update_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 
'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': 
['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 
'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "backendServiceResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateBackendServiceRequest.pb(compute.UpdateBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 
'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 
'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_update_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateBackendServiceRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': 
True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 
'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateBackendServiceRequest): + transport_class = transports.BackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.BackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "backendServiceResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackendServicesRestInterceptor(), + ) + client = BackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.BackendServicesRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.BackendServicesRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateBackendServiceRequest.pb(compute.UpdateBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateBackendServiceRequest): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'backend_service': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 
'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': 
{'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'backend_service': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_unary( + compute.UpdateBackendServiceRequest(), + project='project_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_update_unary_rest_error(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackendServicesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BackendServicesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BackendServicesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackendServicesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BackendServicesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.BackendServicesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = BackendServicesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_backend_services_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BackendServicesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_backend_services_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.backend_services.transports.BackendServicesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.BackendServicesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_signed_url_key', + 'aggregated_list', + 'delete', + 'delete_signed_url_key', + 'get', + 'get_health', + 'get_iam_policy', + 'insert', + 'list', + 'patch', + 'set_edge_security_policy', + 'set_iam_policy', + 'set_security_policy', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_backend_services_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.backend_services.transports.BackendServicesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BackendServicesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_backend_services_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.backend_services.transports.BackendServicesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BackendServicesTransport() + adc.assert_called_once() + + +def test_backend_services_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BackendServicesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_backend_services_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.BackendServicesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_backend_services_host_no_port(transport_name): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_backend_services_host_with_port(transport_name): + client = BackendServicesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_backend_services_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BackendServicesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BackendServicesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_signed_url_key._session + session2 = client2.transport.add_signed_url_key._session + assert session1 != session2 + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.delete_signed_url_key._session + session2 = client2.transport.delete_signed_url_key._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_health._session + session2 = client2.transport.get_health._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = 
client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_edge_security_policy._session + session2 = client2.transport.set_edge_security_policy._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_security_policy._session + session2 = client2.transport.set_security_policy._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = BackendServicesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = BackendServicesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BackendServicesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = BackendServicesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = BackendServicesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackendServicesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = BackendServicesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = BackendServicesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BackendServicesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = BackendServicesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = BackendServicesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BackendServicesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = BackendServicesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = BackendServicesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackendServicesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.BackendServicesTransport, '_prep_wrapped_messages') as prep: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.BackendServicesTransport, '_prep_wrapped_messages') as prep: + transport_class = BackendServicesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (BackendServicesClient, transports.BackendServicesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_disk_types.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_disk_types.py new file mode 100644 index 000000000..08e862f72 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_disk_types.py @@ -0,0 +1,1710 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+# NOTE(review): json_format is imported a second time below; harmless, and
+# this file is generator output (owl-bot staging) — do not hand-edit.
+from google.protobuf import json_format
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.compute_v1.services.disk_types import DiskTypesClient
+from google.cloud.compute_v1.services.disk_types import pagers
+from google.cloud.compute_v1.services.disk_types import transports
+from google.cloud.compute_v1.types import compute
+from google.oauth2 import service_account
+import google.auth
+
+
+def client_cert_source_callback():
+    """Stand-in client certificate provider used by the mTLS tests.
+
+    Returns:
+        A ``(cert_bytes, key_bytes)`` tuple of placeholder PEM payloads.
+    """
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    # Swap a localhost default endpoint for a fake googleapis host so the
+    # mTLS-endpoint derivation under test has something to transform.
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    """_get_default_mtls_endpoint maps *.googleapis.com hosts to *.mtls.googleapis.com.
+
+    None and non-googleapis hosts must pass through unchanged; already-mTLS
+    hosts must be idempotent.
+    """
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert DiskTypesClient._get_default_mtls_endpoint(None) is None
+    assert DiskTypesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert DiskTypesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert DiskTypesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert DiskTypesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert DiskTypesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (DiskTypesClient, "rest"),
+])
+def test_disk_types_client_from_service_account_info(client_class, transport_name):
+    """from_service_account_info wires the parsed credentials into the transport."""
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        # REST transports expose a URL-style host; gRPC would use host:port.
+        assert client.transport._host == (
+            'compute.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://compute.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.DiskTypesRestTransport, "rest"),
+])
+def test_disk_types_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DiskTypesClient, "rest"), +]) +def test_disk_types_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_disk_types_client_get_transport_class(): + transport = DiskTypesClient.get_transport_class() + available_transports = [ + transports.DiskTypesRestTransport, + ] + assert transport in available_transports + + transport = DiskTypesClient.get_transport_class("rest") + assert transport == transports.DiskTypesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DiskTypesClient, transports.DiskTypesRestTransport, "rest"), +]) +@mock.patch.object(DiskTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DiskTypesClient)) +def test_disk_types_client_client_options(client_class, 
transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DiskTypesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DiskTypesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DiskTypesClient, transports.DiskTypesRestTransport, "rest", "true"), + (DiskTypesClient, transports.DiskTypesRestTransport, "rest", "false"), +]) +@mock.patch.object(DiskTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DiskTypesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_disk_types_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DiskTypesClient +]) +@mock.patch.object(DiskTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DiskTypesClient)) +def test_disk_types_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DiskTypesClient, transports.DiskTypesRestTransport, "rest"), +]) +def test_disk_types_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DiskTypesClient, transports.DiskTypesRestTransport, "rest", None), +]) +def test_disk_types_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListDiskTypesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskTypeAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListDiskTypesRequest): + transport_class = transports.DiskTypesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DiskTypeAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DiskTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.DiskTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.DiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DiskTypesRestInterceptor(), + ) + client = DiskTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DiskTypesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.DiskTypesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListDiskTypesRequest.pb(compute.AggregatedListDiskTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.DiskTypeAggregatedList.to_json(compute.DiskTypeAggregatedList()) + + request = compute.AggregatedListDiskTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DiskTypeAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListDiskTypesRequest): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.DiskTypeAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/diskTypes" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListDiskTypesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.DiskTypeAggregatedList( + items={ + 'a':compute.DiskTypesScopedList(), + 'b':compute.DiskTypesScopedList(), + 'c':compute.DiskTypesScopedList(), + }, + next_page_token='abc', + ), + compute.DiskTypeAggregatedList( + items={}, + next_page_token='def', + ), + compute.DiskTypeAggregatedList( + items={ + 'g':compute.DiskTypesScopedList(), + }, + next_page_token='ghi', + ), + compute.DiskTypeAggregatedList( + items={ + 'h':compute.DiskTypesScopedList(), + 'i':compute.DiskTypesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.DiskTypeAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.DiskTypesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.DiskTypesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.DiskTypesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.GetDiskTypeRequest, + dict, +]) +def test_get_rest(request_type): + client = DiskTypesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk_type': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskType( + creation_timestamp='creation_timestamp_value', + default_disk_size_gb=2097, + description='description_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + valid_disk_size='valid_disk_size_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.DiskType) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.default_disk_size_gb == 2097 + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.valid_disk_size == 'valid_disk_size_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetDiskTypeRequest): + transport_class = transports.DiskTypesRestTransport + + request_init = {} + request_init["disk_type"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["diskType"] = 'disk_type_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "diskType" in jsonified_request + assert jsonified_request["diskType"] == 'disk_type_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DiskTypesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DiskType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.DiskTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("diskType", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.DiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DiskTypesRestInterceptor(), + ) + client = DiskTypesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DiskTypesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.DiskTypesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetDiskTypeRequest.pb(compute.GetDiskTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.DiskType.to_json(compute.DiskType()) + + request = compute.GetDiskTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DiskType() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetDiskTypeRequest): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk_type': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskType() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk_type': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk_type='disk_type_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/diskTypes/{disk_type}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetDiskTypeRequest(), + project='project_value', + zone='zone_value', + disk_type='disk_type_value', + ) + + +def test_get_rest_error(): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListDiskTypesRequest, + dict, +]) +def test_list_rest(request_type): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskTypeList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListDiskTypesRequest): + transport_class = transports.DiskTypesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.DiskTypeList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.DiskTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.DiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DiskTypesRestInterceptor(), + ) + client = DiskTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.DiskTypesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.DiskTypesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListDiskTypesRequest.pb(compute.ListDiskTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.DiskTypeList.to_json(compute.DiskTypeList()) + + request = compute.ListDiskTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DiskTypeList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListDiskTypesRequest): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskTypeList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/diskTypes" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListDiskTypesRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.DiskTypeList( + items=[ + compute.DiskType(), + compute.DiskType(), + compute.DiskType(), + ], + next_page_token='abc', + ), + compute.DiskTypeList( + items=[], + next_page_token='def', + ), + compute.DiskTypeList( + items=[ + compute.DiskType(), + ], + next_page_token='ghi', + ), + compute.DiskTypeList( + items=[ + compute.DiskType(), + compute.DiskType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.DiskTypeList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.DiskType) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiskTypesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DiskTypesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DiskTypesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiskTypesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DiskTypesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.DiskTypesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = DiskTypesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_disk_types_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DiskTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_disk_types_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.disk_types.transports.DiskTypesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DiskTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'get', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_disk_types_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.disk_types.transports.DiskTypesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DiskTypesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_disk_types_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.disk_types.transports.DiskTypesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DiskTypesTransport() + adc.assert_called_once() + + +def test_disk_types_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DiskTypesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_disk_types_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.DiskTypesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_disk_types_host_no_port(transport_name): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_disk_types_host_with_port(transport_name): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_disk_types_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + 
creds2 = ga_credentials.AnonymousCredentials() + client1 = DiskTypesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DiskTypesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DiskTypesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DiskTypesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DiskTypesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = DiskTypesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DiskTypesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DiskTypesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DiskTypesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DiskTypesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DiskTypesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = DiskTypesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DiskTypesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DiskTypesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DiskTypesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DiskTypesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DiskTypesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DiskTypesTransport, '_prep_wrapped_messages') as prep: + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DiskTypesTransport, '_prep_wrapped_messages') as prep: + transport_class = DiskTypesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DiskTypesClient, transports.DiskTypesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_disks.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_disks.py new file mode 100644 index 000000000..306bda5df --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_disks.py @@ -0,0 +1,9284 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.disks import DisksClient +from google.cloud.compute_v1.services.disks import pagers +from google.cloud.compute_v1.services.disks import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DisksClient._get_default_mtls_endpoint(None) is None + assert DisksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DisksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DisksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DisksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DisksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DisksClient, "rest"), +]) +def test_disks_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DisksRestTransport, "rest"), +]) +def test_disks_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, 
None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DisksClient, "rest"), +]) +def test_disks_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_disks_client_get_transport_class(): + transport = DisksClient.get_transport_class() + available_transports = [ + transports.DisksRestTransport, + ] + assert transport in available_transports + + transport = DisksClient.get_transport_class("rest") + assert transport == transports.DisksRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DisksClient, transports.DisksRestTransport, "rest"), +]) +@mock.patch.object(DisksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DisksClient)) +def test_disks_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(DisksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DisksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DisksClient, transports.DisksRestTransport, "rest", "true"), + (DisksClient, transports.DisksRestTransport, "rest", "false"), +]) +@mock.patch.object(DisksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DisksClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_disks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DisksClient +]) +@mock.patch.object(DisksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DisksClient)) +def test_disks_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DisksClient, transports.DisksRestTransport, "rest"), +]) +def test_disks_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DisksClient, transports.DisksRestTransport, "rest", None), +]) +def test_disks_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddResourcePoliciesDiskRequest, + dict, +]) +def test_add_resource_policies_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_resource_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_resource_policies_rest_required_fields(request_type=compute.AddResourcePoliciesDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_resource_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_resource_policies_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "disksAddResourcePoliciesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_resource_policies_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_add_resource_policies") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_add_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddResourcePoliciesDiskRequest.pb(compute.AddResourcePoliciesDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddResourcePoliciesDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_resource_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_resource_policies_rest_bad_request(transport: str = 'rest', request_type=compute.AddResourcePoliciesDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies(request) + + +def test_add_resource_policies_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disks_add_resource_policies_request_resource=compute.DisksAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_resource_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/addResourcePolicies" % client.transport._host, args[1]) + + +def test_add_resource_policies_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_resource_policies( + compute.AddResourcePoliciesDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disks_add_resource_policies_request_resource=compute.DisksAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_add_resource_policies_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddResourcePoliciesDiskRequest, + dict, +]) +def test_add_resource_policies_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_resource_policies_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_resource_policies_unary_rest_required_fields(request_type=compute.AddResourcePoliciesDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_resource_policies_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_resource_policies_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "disksAddResourcePoliciesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_resource_policies_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.DisksRestInterceptor, "post_add_resource_policies") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_add_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddResourcePoliciesDiskRequest.pb(compute.AddResourcePoliciesDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddResourcePoliciesDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_resource_policies_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_resource_policies_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddResourcePoliciesDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies_unary(request) + + +def test_add_resource_policies_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disks_add_resource_policies_request_resource=compute.DisksAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_resource_policies_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/addResourcePolicies" % client.transport._host, args[1]) + + +def test_add_resource_policies_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_resource_policies_unary( + compute.AddResourcePoliciesDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disks_add_resource_policies_request_resource=compute.DisksAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_add_resource_policies_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListDisksRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.DiskAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListDisksRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DiskAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
            # (tail of test_aggregated_list_rest_required_fields — its `def` is above this chunk)
            # With transcode mocked, the transport serializes the request and the
            # fake Session returns a canned DiskAggregatedList; the test then checks
            # that no query params beyond the required ones were sent.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.DiskAggregatedList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.aggregated_list(request)

            # No extra params expected for this method.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_aggregated_list_rest_unset_required_fields():
    """Verify the set of optional query params tracked for aggregated_list.

    _get_unset_required_fields({}) reports fields that may be unset; the
    assertion intersects the optional query params with the required path
    params (camelCase, as sent on the wire).
    """
    transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.aggregated_list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_aggregated_list_rest_interceptors(null_interceptor):
    """Check that pre/post interceptors run exactly once around aggregated_list.

    Runs both with a real DisksRestInterceptor and with interceptor=None to
    cover the no-interceptor code path.
    """
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DisksRestInterceptor(),
        )
    client = DisksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.DisksRestInterceptor, "post_aggregated_list") as post, \
         mock.patch.object(transports.DisksRestInterceptor, "pre_aggregated_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AggregatedListDisksRequest.pb(compute.AggregatedListDisksRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.DiskAggregatedList.to_json(compute.DiskAggregatedList())

        request = compute.AggregatedListDisksRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # pre returns (request, metadata); post returns the response object.
        pre.return_value = request, metadata
        post.return_value = compute.DiskAggregatedList()

        client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListDisksRequest):
    """A 400 from the HTTP layer must surface as core_exceptions.BadRequest."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.aggregated_list(request)


def test_aggregated_list_rest_flattened():
    """Flattened (keyword) args must transcode onto the documented URL template."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.DiskAggregatedList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.DiskAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.aggregated_list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/disks" % client.transport._host, args[1])


def test_aggregated_list_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.aggregated_list(
            compute.AggregatedListDisksRequest(),
            project='project_value',
        )


def test_aggregated_list_rest_pager(transport: str = 'rest'):
    """Pager over aggregated_list: item lookup, (key, value) iteration, and
    page-token sequencing across a multi-page fake response stream."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.DiskAggregatedList(
                items={
                    'a':compute.DisksScopedList(),
                    'b':compute.DisksScopedList(),
                    'c':compute.DisksScopedList(),
                },
                next_page_token='abc',
            ),
            compute.DiskAggregatedList(
                items={},
                next_page_token='def',
            ),
            compute.DiskAggregatedList(
                items={
                    'g':compute.DisksScopedList(),
                },
                next_page_token='ghi',
            ),
            compute.DiskAggregatedList(
                items={
                    'h':compute.DisksScopedList(),
                    'i':compute.DisksScopedList(),
                },
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.DiskAggregatedList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1'}

        pager = client.aggregated_list(request=sample_request)

        # 'a' is on the first page, 'h' on the last — before iteration only
        # the current page is visible.
        assert isinstance(pager.get('a'), compute.DisksScopedList)
        assert pager.get('h') is None

        results = list(pager)
        assert len(results) == 6
        assert all(
            isinstance(i, tuple)
                for i in results)
        for result in results:
            assert isinstance(result, tuple)
            assert tuple(type(t) for t in result) == (str, compute.DisksScopedList)

        # After exhausting the pager, the current page has advanced.
        assert pager.get('a') is None
        assert isinstance(pager.get('h'), compute.DisksScopedList)

        pages = list(client.aggregated_list(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [
    compute.BulkInsertDiskRequest,
    dict,
])
def test_bulk_insert_rest(request_type):
    """Happy-path bulk_insert over REST (continues in the next chunk)."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    # (body of test_bulk_insert_rest — its `def` and client setup are in the
    # previous chunk; this sets up the request that satisfies transcoding)
    request_init = {'project': 'sample1', 'zone': 'sample2'}
    request_init["bulk_insert_disk_resource_resource"] = {'source_consistency_group_policy': 'source_consistency_group_policy_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.bulk_insert(request)

    # Establish that the response is the type that we expect.
    # bulk_insert (non-unary) wraps the Operation in an ExtendedOperation.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_bulk_insert_rest_required_fields(request_type=compute.BulkInsertDiskRequest):
    """Required-field handling for bulk_insert: defaults are dropped from the
    serialized request, required path params survive, and the call succeeds
    once they are populated."""
    transport_class = transports.DisksRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.bulk_insert(request)

            # No extra query params expected for this method.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_bulk_insert_rest_unset_required_fields():
    """Optional query params ∩ required body/path params for bulk_insert
    (wire-format camelCase names)."""
    transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.bulk_insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("bulkInsertDiskResourceResource", "project", "zone", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_bulk_insert_rest_interceptors(null_interceptor):
    """pre/post interceptors fire exactly once around bulk_insert."""
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DisksRestInterceptor(),
        )
    client = DisksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.DisksRestInterceptor, "post_bulk_insert") as post, \
         mock.patch.object(transports.DisksRestInterceptor, "pre_bulk_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.BulkInsertDiskRequest.pb(compute.BulkInsertDiskRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.BulkInsertDiskRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.bulk_insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_bulk_insert_rest_bad_request(transport: str = 'rest', request_type=compute.BulkInsertDiskRequest):
    """A 400 from the HTTP layer must surface as core_exceptions.BadRequest."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2'}
    request_init["bulk_insert_disk_resource_resource"] = {'source_consistency_group_policy': 'source_consistency_group_policy_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.bulk_insert(request)


def test_bulk_insert_rest_flattened():
    """Flattened args for bulk_insert must transcode onto the documented URL."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource(source_consistency_group_policy='source_consistency_group_policy_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.bulk_insert(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/bulkInsert" % client.transport._host, args[1])


def test_bulk_insert_rest_flattened_error(transport: str = 'rest'):
    """Request object + flattened fields together raise ValueError
    (assertion body continues in the next chunk)."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    # (tail of test_bulk_insert_rest_flattened_error — `def` is in the
    # previous chunk)
    with pytest.raises(ValueError):
        client.bulk_insert(
            compute.BulkInsertDiskRequest(),
            project='project_value',
            zone='zone_value',
            bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource(source_consistency_group_policy='source_consistency_group_policy_value'),
        )


def test_bulk_insert_rest_error():
    """Smoke test: constructing a REST client for this method does not raise."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.BulkInsertDiskRequest,
    dict,
])
def test_bulk_insert_unary_rest(request_type):
    """Happy-path bulk_insert_unary over REST: the unary variant returns the
    raw compute.Operation rather than an ExtendedOperation."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2'}
    request_init["bulk_insert_disk_resource_resource"] = {'source_consistency_group_policy': 'source_consistency_group_policy_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.bulk_insert_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_bulk_insert_unary_rest_required_fields(request_type=compute.BulkInsertDiskRequest):
    """Required-field handling for bulk_insert_unary; mirrors the non-unary
    variant (both resolve to the same underlying `bulk_insert` RPC method)."""
    transport_class = transports.DisksRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.bulk_insert_unary(request)

            # No extra query params expected for this method.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_bulk_insert_unary_rest_unset_required_fields():
    """Optional query params ∩ required params; inspects the shared
    `bulk_insert` transport method."""
    transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.bulk_insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("bulkInsertDiskResourceResource", "project", "zone", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_bulk_insert_unary_rest_interceptors(null_interceptor):
    """pre/post interceptors fire exactly once around bulk_insert_unary."""
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DisksRestInterceptor(),
        )
    client = DisksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.DisksRestInterceptor, "post_bulk_insert") as post, \
         mock.patch.object(transports.DisksRestInterceptor, "pre_bulk_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.BulkInsertDiskRequest.pb(compute.BulkInsertDiskRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.BulkInsertDiskRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.bulk_insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_bulk_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.BulkInsertDiskRequest):
    """A 400 from the HTTP layer must surface as core_exceptions.BadRequest."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2'}
    request_init["bulk_insert_disk_resource_resource"] = {'source_consistency_group_policy': 'source_consistency_group_policy_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.bulk_insert_unary(request)


def test_bulk_insert_unary_rest_flattened():
    """Flattened args for bulk_insert_unary must transcode onto the documented
    URL (final assertions continue in the next chunk)."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource(source_consistency_group_policy='source_consistency_group_policy_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.bulk_insert_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        # (tail of test_bulk_insert_unary_rest_flattened — `def` is in the
        # previous chunk)
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/bulkInsert" % client.transport._host, args[1])


def test_bulk_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Request object + flattened fields together raise ValueError."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.bulk_insert_unary(
            compute.BulkInsertDiskRequest(),
            project='project_value',
            zone='zone_value',
            bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource(source_consistency_group_policy='source_consistency_group_policy_value'),
        )


def test_bulk_insert_unary_rest_error():
    """Smoke test: constructing a REST client for this method does not raise."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.CreateSnapshotDiskRequest,
    dict,
])
def test_create_snapshot_rest(request_type):
    """Happy-path create_snapshot over REST: full snapshot body round-trips
    and the result is wrapped as an ExtendedOperation."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'}
    request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.create_snapshot(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_create_snapshot_rest_required_fields(request_type=compute.CreateSnapshotDiskRequest):
    """Required-field handling for create_snapshot: disk, project, and zone
    are required path params; guest_flush and request_id remain optional."""
    transport_class = transports.DisksRestTransport

    request_init = {}
    request_init["disk"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_snapshot._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["disk"] = 'disk_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_snapshot._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("guest_flush", "request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "disk" in jsonified_request
    assert jsonified_request["disk"] == 'disk_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.create_snapshot(request)

            # No extra query params expected for this method.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_create_snapshot_rest_unset_required_fields():
    """Optional query params ∩ required params for create_snapshot
    (wire-format camelCase names)."""
    transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.create_snapshot._get_unset_required_fields({})
    assert set(unset_fields) == (set(("guestFlush", "requestId", )) & set(("disk", "project", "snapshotResource", "zone", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_snapshot_rest_interceptors(null_interceptor):
    """pre/post interceptors fire exactly once around create_snapshot."""
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DisksRestInterceptor(),
        )
    client = DisksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.DisksRestInterceptor, "post_create_snapshot") as post, \
         mock.patch.object(transports.DisksRestInterceptor, "pre_create_snapshot") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.CreateSnapshotDiskRequest.pb(compute.CreateSnapshotDiskRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.CreateSnapshotDiskRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.create_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_create_snapshot_rest_bad_request(transport: str = 'rest', request_type=compute.CreateSnapshotDiskRequest):
    """A 400 from the HTTP layer must surface as core_exceptions.BadRequest."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'}
    request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.create_snapshot(request)


def test_create_snapshot_rest_flattened():
    """Flattened args for create_snapshot (final assertions continue past
    this chunk)."""
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            disk='disk_value',
            snapshot_resource=compute.Snapshot(architecture='architecture_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.create_snapshot(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/createSnapshot" % client.transport._host, args[1]) + + +def test_create_snapshot_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_snapshot( + compute.CreateSnapshotDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + + +def test_create_snapshot_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CreateSnapshotDiskRequest, + dict, +]) +def test_create_snapshot_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 
'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_snapshot_unary(request) + + # Establish that the response is 
the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_create_snapshot_unary_rest_required_fields(request_type=compute.CreateSnapshotDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_snapshot._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("guest_flush", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_snapshot_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_snapshot_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(("guestFlush", "requestId", )) & set(("disk", "project", "snapshotResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_snapshot_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.DisksRestInterceptor, "post_create_snapshot") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_create_snapshot") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CreateSnapshotDiskRequest.pb(compute.CreateSnapshotDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CreateSnapshotDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.create_snapshot_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_snapshot_unary_rest_bad_request(transport: str = 'rest', request_type=compute.CreateSnapshotDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 
'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_snapshot_unary(request) + + +def test_create_snapshot_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_snapshot_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/createSnapshot" % client.transport._host, args[1]) + + +def test_create_snapshot_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_snapshot_unary( + compute.CreateSnapshotDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + + +def test_create_snapshot_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteDiskRequest, + dict, +]) +def test_delete_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteDiskRequest.pb(compute.DeleteDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + ) + + +def test_delete_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteDiskRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.DisksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteDiskRequest.pb(compute.DeleteDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + ) + + +def test_delete_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetDiskRequest, + dict, +]) +def test_get_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Disk( + architecture='architecture_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + last_attach_timestamp='last_attach_timestamp_value', + last_detach_timestamp='last_detach_timestamp_value', + license_codes=[1360], + licenses=['licenses_value'], + location_hint='location_hint_value', + name='name_value', + options='options_value', + physical_block_size_bytes=2663, + provisioned_iops=1740, + provisioned_throughput=2411, + region='region_value', + replica_zones=['replica_zones_value'], + resource_policies=['resource_policies_value'], + satisfies_pzs=True, + self_link='self_link_value', + size_gb=739, + source_consistency_group_policy='source_consistency_group_policy_value', + source_consistency_group_policy_id='source_consistency_group_policy_id_value', + source_disk='source_disk_value', + source_disk_id='source_disk_id_value', + source_image='source_image_value', + source_image_id='source_image_id_value', + 
source_snapshot='source_snapshot_value', + source_snapshot_id='source_snapshot_id_value', + source_storage_object='source_storage_object_value', + status='status_value', + type_='type__value', + users=['users_value'], + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Disk) + assert response.architecture == 'architecture_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.last_attach_timestamp == 'last_attach_timestamp_value' + assert response.last_detach_timestamp == 'last_detach_timestamp_value' + assert response.license_codes == [1360] + assert response.licenses == ['licenses_value'] + assert response.location_hint == 'location_hint_value' + assert response.name == 'name_value' + assert response.options == 'options_value' + assert response.physical_block_size_bytes == 2663 + assert response.provisioned_iops == 1740 + assert response.provisioned_throughput == 2411 + assert response.region == 'region_value' + assert response.replica_zones == ['replica_zones_value'] + assert response.resource_policies == ['resource_policies_value'] + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.size_gb == 739 + assert response.source_consistency_group_policy == 'source_consistency_group_policy_value' + assert response.source_consistency_group_policy_id == 
'source_consistency_group_policy_id_value' + assert response.source_disk == 'source_disk_value' + assert response.source_disk_id == 'source_disk_id_value' + assert response.source_image == 'source_image_value' + assert response.source_image_id == 'source_image_id_value' + assert response.source_snapshot == 'source_snapshot_value' + assert response.source_snapshot_id == 'source_snapshot_id_value' + assert response.source_storage_object == 'source_storage_object_value' + assert response.status == 'status_value' + assert response.type_ == 'type__value' + assert response.users == ['users_value'] + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in 
jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Disk() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("disk", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + 
client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetDiskRequest.pb(compute.GetDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Disk.to_json(compute.Disk()) + + request = compute.GetDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Disk() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Disk() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + ) + + +def test_get_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyDiskRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.DisksRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyDiskRequest.pb(compute.GetIamPolicyDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyDiskRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertDiskRequest, + dict, +]) +def test_insert_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 
'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 
'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_image", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceImage", )) & set(("diskResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertDiskRequest.pb(compute.InsertDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 
'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertDiskRequest(), + project='project_value', + zone='zone_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + + +def test_insert_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertDiskRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 
'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_image", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceImage", )) & set(("diskResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_insert") as post, \ + 
mock.patch.object(transports.DisksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertDiskRequest.pb(compute.InsertDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 
'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertDiskRequest(), + project='project_value', + zone='zone_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + + +def test_insert_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListDisksRequest, + dict, +]) +def test_list_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListDisksRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DiskList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_list") as post, \ + 
mock.patch.object(transports.DisksRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListDisksRequest.pb(compute.ListDisksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.DiskList.to_json(compute.DiskList()) + + request = compute.ListDisksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DiskList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListDisksRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.DiskList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListDisksRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.DiskList( + items=[ + compute.Disk(), + compute.Disk(), + compute.Disk(), + ], + next_page_token='abc', + ), + compute.DiskList( + items=[], + next_page_token='def', + ), + compute.DiskList( + items=[ + compute.Disk(), + ], + next_page_token='ghi', + ), + compute.DiskList( + items=[ + compute.Disk(), + compute.Disk(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.DiskList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Disk) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveResourcePoliciesDiskRequest, + dict, +]) +def test_remove_resource_policies_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_resource_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_resource_policies_rest_required_fields(request_type=compute.RemoveResourcePoliciesDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_resource_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_resource_policies_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "disksRemoveResourcePoliciesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_resource_policies_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_remove_resource_policies") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_remove_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveResourcePoliciesDiskRequest.pb(compute.RemoveResourcePoliciesDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveResourcePoliciesDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_resource_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_resource_policies_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveResourcePoliciesDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies(request) + + +def test_remove_resource_policies_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disks_remove_resource_policies_request_resource=compute.DisksRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_resource_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/removeResourcePolicies" % client.transport._host, args[1]) + + +def test_remove_resource_policies_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_resource_policies( + compute.RemoveResourcePoliciesDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disks_remove_resource_policies_request_resource=compute.DisksRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_remove_resource_policies_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveResourcePoliciesDiskRequest, + dict, +]) +def test_remove_resource_policies_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_resource_policies_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_resource_policies_unary_rest_required_fields(request_type=compute.RemoveResourcePoliciesDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_resource_policies_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_resource_policies_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "disksRemoveResourcePoliciesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_resource_policies_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_remove_resource_policies") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_remove_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveResourcePoliciesDiskRequest.pb(compute.RemoveResourcePoliciesDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveResourcePoliciesDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_resource_policies_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_resource_policies_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveResourcePoliciesDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies_unary(request) + + +def test_remove_resource_policies_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disks_remove_resource_policies_request_resource=compute.DisksRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_resource_policies_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/removeResourcePolicies" % client.transport._host, args[1]) + + +def test_remove_resource_policies_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_resource_policies_unary( + compute.RemoveResourcePoliciesDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disks_remove_resource_policies_request_resource=compute.DisksRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_remove_resource_policies_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeDiskRequest, + dict, +]) +def test_resize_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_resize_request_resource"] = {'size_gb': 739} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_resize_rest_required_fields(request_type=compute.ResizeDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "disksResizeRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_resize") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeDiskRequest.pb(compute.ResizeDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_resize_request_resource"] = {'size_gb': 739} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + +def test_resize_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disks_resize_request_resource=compute.DisksResizeRequest(size_gb=739), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/resize" % client.transport._host, args[1]) + + +def test_resize_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize( + compute.ResizeDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disks_resize_request_resource=compute.DisksResizeRequest(size_gb=739), + ) + + +def test_resize_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeDiskRequest, + dict, +]) +def test_resize_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_resize_request_resource"] = {'size_gb': 739} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_resize_unary_rest_required_fields(request_type=compute.ResizeDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "disksResizeRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_resize") as 
post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeDiskRequest.pb(compute.ResizeDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_resize_request_resource"] = {'size_gb': 739} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize_unary(request) + + +def test_resize_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disks_resize_request_resource=compute.DisksResizeRequest(size_gb=739), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/resize" % client.transport._host, args[1]) + + +def test_resize_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize_unary( + compute.ResizeDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disks_resize_request_resource=compute.DisksResizeRequest(size_gb=739), + ) + + +def test_resize_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyDiskRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 
'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + 
jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "zone", "zoneSetPolicyRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyDiskRequest.pb(compute.SetIamPolicyDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 
'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyDiskRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsDiskRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "resource", "zone", "zoneSetLabelsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsDiskRequest.pb(compute.SetLabelsDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code 
= 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_labels_request_resource=compute.ZoneSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsDiskRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_labels_request_resource=compute.ZoneSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsDiskRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "resource", "zone", "zoneSetLabelsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, 
"post_set_labels") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsDiskRequest.pb(compute.SetLabelsDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_labels_request_resource=compute.ZoneSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsDiskRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_labels_request_resource=compute.ZoneSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StartAsyncReplicationDiskRequest, + dict, +]) +def test_start_async_replication_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_start_async_replication_request_resource"] = {'async_secondary_disk': 'async_secondary_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.start_async_replication(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_start_async_replication_rest_required_fields(request_type=compute.StartAsyncReplicationDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.start_async_replication(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_start_async_replication_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.start_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "disksStartAsyncReplicationRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_async_replication_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_start_async_replication") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_start_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StartAsyncReplicationDiskRequest.pb(compute.StartAsyncReplicationDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StartAsyncReplicationDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.start_async_replication(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_async_replication_rest_bad_request(transport: str = 'rest', request_type=compute.StartAsyncReplicationDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_start_async_replication_request_resource"] = {'async_secondary_disk': 'async_secondary_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_async_replication(request) + + +def test_start_async_replication_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disks_start_async_replication_request_resource=compute.DisksStartAsyncReplicationRequest(async_secondary_disk='async_secondary_disk_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.start_async_replication(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/startAsyncReplication" % client.transport._host, args[1]) + + +def test_start_async_replication_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.start_async_replication( + compute.StartAsyncReplicationDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disks_start_async_replication_request_resource=compute.DisksStartAsyncReplicationRequest(async_secondary_disk='async_secondary_disk_value'), + ) + + +def test_start_async_replication_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StartAsyncReplicationDiskRequest, + dict, +]) +def test_start_async_replication_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_start_async_replication_request_resource"] = {'async_secondary_disk': 'async_secondary_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.start_async_replication_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_start_async_replication_unary_rest_required_fields(request_type=compute.StartAsyncReplicationDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.start_async_replication_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_start_async_replication_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.start_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "disksStartAsyncReplicationRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_async_replication_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_start_async_replication") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_start_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StartAsyncReplicationDiskRequest.pb(compute.StartAsyncReplicationDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StartAsyncReplicationDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.start_async_replication_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_async_replication_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StartAsyncReplicationDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disks_start_async_replication_request_resource"] = {'async_secondary_disk': 'async_secondary_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_async_replication_unary(request) + + +def test_start_async_replication_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disks_start_async_replication_request_resource=compute.DisksStartAsyncReplicationRequest(async_secondary_disk='async_secondary_disk_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.start_async_replication_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/startAsyncReplication" % client.transport._host, args[1]) + + +def test_start_async_replication_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_async_replication_unary( + compute.StartAsyncReplicationDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disks_start_async_replication_request_resource=compute.DisksStartAsyncReplicationRequest(async_secondary_disk='async_secondary_disk_value'), + ) + + +def test_start_async_replication_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopAsyncReplicationDiskRequest, + dict, +]) +def test_stop_async_replication_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop_async_replication(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_stop_async_replication_rest_required_fields(request_type=compute.StopAsyncReplicationDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_async_replication(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_async_replication_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_async_replication_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_stop_async_replication") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_stop_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopAsyncReplicationDiskRequest.pb(compute.StopAsyncReplicationDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopAsyncReplicationDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_async_replication(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_async_replication_rest_bad_request(transport: str = 'rest', request_type=compute.StopAsyncReplicationDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_async_replication(request) + + +def test_stop_async_replication_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_async_replication(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/stopAsyncReplication" % client.transport._host, args[1]) + + +def test_stop_async_replication_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop_async_replication( + compute.StopAsyncReplicationDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + ) + + +def test_stop_async_replication_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopAsyncReplicationDiskRequest, + dict, +]) +def test_stop_async_replication_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop_async_replication_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_stop_async_replication_unary_rest_required_fields(request_type=compute.StopAsyncReplicationDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_async_replication_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_async_replication_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_async_replication_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_stop_async_replication") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_stop_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopAsyncReplicationDiskRequest.pb(compute.StopAsyncReplicationDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopAsyncReplicationDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_async_replication_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_async_replication_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StopAsyncReplicationDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_async_replication_unary(request) + + +def test_stop_async_replication_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_async_replication_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/stopAsyncReplication" % client.transport._host, args[1]) + + +def test_stop_async_replication_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop_async_replication_unary( + compute.StopAsyncReplicationDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + ) + + +def test_stop_async_replication_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopGroupAsyncReplicationDiskRequest, + dict, +]) +def test_stop_group_async_replication_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["disks_stop_group_async_replication_resource_resource"] = {'resource_policy': 'resource_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop_group_async_replication(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_stop_group_async_replication_rest_required_fields(request_type=compute.StopGroupAsyncReplicationDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_group_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_group_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_group_async_replication(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_group_async_replication_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop_group_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disksStopGroupAsyncReplicationResourceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_group_async_replication_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_stop_group_async_replication") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_stop_group_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopGroupAsyncReplicationDiskRequest.pb(compute.StopGroupAsyncReplicationDiskRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopGroupAsyncReplicationDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_group_async_replication(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_group_async_replication_rest_bad_request(transport: str = 'rest', request_type=compute.StopGroupAsyncReplicationDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["disks_stop_group_async_replication_resource_resource"] = {'resource_policy': 'resource_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_group_async_replication(request) + + +def test_stop_group_async_replication_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disks_stop_group_async_replication_resource_resource=compute.DisksStopGroupAsyncReplicationResource(resource_policy='resource_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_group_async_replication(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/stopGroupAsyncReplication" % client.transport._host, args[1]) + + +def test_stop_group_async_replication_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop_group_async_replication( + compute.StopGroupAsyncReplicationDiskRequest(), + project='project_value', + zone='zone_value', + disks_stop_group_async_replication_resource_resource=compute.DisksStopGroupAsyncReplicationResource(resource_policy='resource_policy_value'), + ) + + +def test_stop_group_async_replication_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopGroupAsyncReplicationDiskRequest, + dict, +]) +def test_stop_group_async_replication_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["disks_stop_group_async_replication_resource_resource"] = {'resource_policy': 'resource_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop_group_async_replication_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_stop_group_async_replication_unary_rest_required_fields(request_type=compute.StopGroupAsyncReplicationDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_group_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_group_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_group_async_replication_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_group_async_replication_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop_group_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disksStopGroupAsyncReplicationResourceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_group_async_replication_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_stop_group_async_replication") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_stop_group_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopGroupAsyncReplicationDiskRequest.pb(compute.StopGroupAsyncReplicationDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopGroupAsyncReplicationDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_group_async_replication_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_group_async_replication_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StopGroupAsyncReplicationDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["disks_stop_group_async_replication_resource_resource"] = {'resource_policy': 'resource_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_group_async_replication_unary(request) + + +def test_stop_group_async_replication_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disks_stop_group_async_replication_resource_resource=compute.DisksStopGroupAsyncReplicationResource(resource_policy='resource_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_group_async_replication_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/stopGroupAsyncReplication" % client.transport._host, args[1]) + + +def test_stop_group_async_replication_unary_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stop_group_async_replication_unary( + compute.StopGroupAsyncReplicationDiskRequest(), + project='project_value', + zone='zone_value', + disks_stop_group_async_replication_resource_resource=compute.DisksStopGroupAsyncReplicationResource(resource_policy='resource_policy_value'), + ) + + +def test_stop_group_async_replication_unary_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsDiskRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert 
"resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", "zone", ))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsDiskRequest.pb(compute.TestIamPermissionsDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 
'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsDiskRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateDiskRequest, + dict, +]) +def test_update_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 
'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("disk", "diskResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateDiskRequest.pb(compute.UpdateDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 
'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + disk='disk_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateDiskRequest(), + project='project_value', + zone='zone_value', + disk='disk_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + + +def test_update_rest_error(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateDiskRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 
'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateDiskRequest): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("disk", "diskResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DisksRestInterceptor, 
"post_update") as post, \ + mock.patch.object(transports.DisksRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateDiskRequest.pb(compute.UpdateDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 
'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
# NOTE(review): reconstructed from a whitespace-mangled diff; span opens inside
# test_update_unary_rest_bad_request and closes inside test_credentials_transport_error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update_unary(request)


def test_update_unary_rest_flattened():
    # Flattened-argument form must transcode into the expected disks URL.
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2', 'disk': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            disk='disk_value',
            disk_resource=compute.Disk(architecture='architecture_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.update_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}" % client.transport._host, args[1])


def test_update_unary_rest_flattened_error(transport: str = 'rest'):
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_unary(
            compute.UpdateDiskRequest(),
            project='project_value',
            zone='zone_value',
            disk='disk_value',
            disk_resource=compute.Disk(architecture='architecture_value'),
        )


def test_update_unary_rest_error():
    # Smoke test: client construction with the rest transport succeeds.
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


def test_credentials_transport_error():
    # It is an error to provide credentials and a transport instance.
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = DisksClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = DisksClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = DisksClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
# NOTE(review): reconstructed from a whitespace-mangled diff; span opens inside
# test_credentials_transport_error and closes inside test_disks_base_transport.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = DisksClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = DisksClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    # A client may be instantiated with a custom transport instance.
    transport = transports.DisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = DisksClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.DisksRestTransport,
])
def test_transport_adc(transport_class):
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    # transport.kind must echo the name it was constructed under.
    transport = DisksClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_disks_base_transport_error():
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.DisksTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_disks_base_transport():
    # Instantiate the base transport.
# NOTE(review): reconstructed from a whitespace-mangled diff; span opens inside
# test_disks_base_transport and closes at the head of test_disks_base_transport_with_adc.
    with mock.patch('google.cloud.compute_v1.services.disks.transports.DisksTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.DisksTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'add_resource_policies',
        'aggregated_list',
        'bulk_insert',
        'create_snapshot',
        'delete',
        'get',
        'get_iam_policy',
        'insert',
        'list',
        'remove_resource_policies',
        'resize',
        'set_iam_policy',
        'set_labels',
        'start_async_replication',
        'stop_async_replication',
        'stop_group_async_replication',
        'test_iam_permissions',
        'update',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_disks_base_transport_with_credentials_file():
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.disks.transports.DisksTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.DisksTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        # The file must be loaded with the compute/cloud-platform default scopes.
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id="octopus",
        )


def test_disks_base_transport_with_adc():
    # Test the default credentials are used if credentials and credentials_file are None.
# NOTE(review): reconstructed from a whitespace-mangled diff; span opens inside
# test_disks_base_transport_with_adc and closes inside test_parse_common_billing_account_path.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.disks.transports.DisksTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.DisksTransport()
        adc.assert_called_once()


def test_disks_auth_adc():
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        DisksClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id=None,
        )


def test_disks_http_transport_client_cert_source_for_mtls():
    # The mTLS client cert callback must be forwarded to the session.
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.DisksRestTransport(
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_disks_host_no_port(transport_name):
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_disks_host_with_port(transport_name):
    client = DisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com:8000'
    )


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_disks_client_transport_session_collision(transport_name):
    # Two clients must not share HTTP sessions for any method.
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = DisksClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = DisksClient(
        credentials=creds2,
        transport=transport_name,
    )
    session1 = client1.transport.add_resource_policies._session
    session2 = client2.transport.add_resource_policies._session
    assert session1 != session2
    session1 = client1.transport.aggregated_list._session
    session2 = client2.transport.aggregated_list._session
    assert session1 != session2
    session1 = client1.transport.bulk_insert._session
    session2 = client2.transport.bulk_insert._session
    assert session1 != session2
    session1 = client1.transport.create_snapshot._session
    session2 = client2.transport.create_snapshot._session
    assert session1 != session2
    session1 = client1.transport.delete._session
    session2 = client2.transport.delete._session
    assert session1 != session2
    session1 = client1.transport.get._session
    session2 = client2.transport.get._session
    assert session1 != session2
    session1 = client1.transport.get_iam_policy._session
    session2 = client2.transport.get_iam_policy._session
    assert session1 != session2
    session1 = client1.transport.insert._session
    session2 = client2.transport.insert._session
    assert session1 != session2
    session1 = client1.transport.list._session
    session2 = client2.transport.list._session
    assert session1 != session2
    session1 = client1.transport.remove_resource_policies._session
    session2 = client2.transport.remove_resource_policies._session
    assert session1 != session2
    session1 = client1.transport.resize._session
    session2 = client2.transport.resize._session
    assert session1 != session2
    session1 = client1.transport.set_iam_policy._session
    session2 = client2.transport.set_iam_policy._session
    assert session1 != session2
    session1 = client1.transport.set_labels._session
    session2 = client2.transport.set_labels._session
    assert session1 != session2
    session1 = client1.transport.start_async_replication._session
    session2 = client2.transport.start_async_replication._session
    assert session1 != session2
    session1 = client1.transport.stop_async_replication._session
    session2 = client2.transport.stop_async_replication._session
    assert session1 != session2
    session1 = client1.transport.stop_group_async_replication._session
    session2 = client2.transport.stop_group_async_replication._session
    assert session1 != session2
    session1 = client1.transport.test_iam_permissions._session
    session2 = client2.transport.test_iam_permissions._session
    assert session1 != session2
    session1 = client1.transport.update._session
    session2 = client2.transport.update._session
    assert session1 != session2


def test_common_billing_account_path():
    billing_account = "squid"
    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
    actual = DisksClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    expected = {
        "billing_account": "clam",
    }
    path = DisksClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
# NOTE(review): reconstructed from a whitespace-mangled diff; span opens inside
# test_parse_common_billing_account_path and closes inside test_client_ctx.
    actual = DisksClient.parse_common_billing_account_path(path)
    assert expected == actual


def test_common_folder_path():
    folder = "whelk"
    expected = "folders/{folder}".format(folder=folder, )
    actual = DisksClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    expected = {
        "folder": "octopus",
    }
    path = DisksClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = DisksClient.parse_common_folder_path(path)
    assert expected == actual


def test_common_organization_path():
    organization = "oyster"
    expected = "organizations/{organization}".format(organization=organization, )
    actual = DisksClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    expected = {
        "organization": "nudibranch",
    }
    path = DisksClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = DisksClient.parse_common_organization_path(path)
    assert expected == actual


def test_common_project_path():
    project = "cuttlefish"
    expected = "projects/{project}".format(project=project, )
    actual = DisksClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    expected = {
        "project": "mussel",
    }
    path = DisksClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = DisksClient.parse_common_project_path(path)
    assert expected == actual


def test_common_location_path():
    project = "winkle"
    location = "nautilus"
    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
    actual = DisksClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = DisksClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = DisksClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    # client_info must be forwarded to _prep_wrapped_messages for both the
    # client and a directly-constructed transport.
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.DisksTransport, '_prep_wrapped_messages') as prep:
        client = DisksClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.DisksTransport, '_prep_wrapped_messages') as prep:
        transport_class = DisksClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    # Note: this local `transports` dict intentionally shadows the imported
    # `transports` module for the duration of the test.
    transports = {
        "rest": "_session",
    }

    for transport, close_name in transports.items():
        client = DisksClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()


def test_client_ctx():
    transports = [
        'rest',
    ]
    for transport in transports:
        client = DisksClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        # Test client calls underlying transport.
# NOTE(review): reconstructed from a whitespace-mangled diff; span opens inside
# test_client_ctx, ends test_disks.py, and starts the diff of a new generated file.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()


@pytest.mark.parametrize("client_class,transport_class", [
    (DisksClient, transports.DisksRestTransport),
])
def test_api_key_credentials(client_class, transport_class):
    # An api_key in client_options must be exchanged for API-key credentials
    # and passed to the transport constructor.
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py
new file mode 100644
index 000000000..21645b144
--- /dev/null
+++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py
@@ -0,0 +1,3274 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE(review): reconstructed from a whitespace-mangled diff; this span is the
# import header and first tests of the generated test_external_vpn_gateways.py,
# closing inside test_external_vpn_gateways_client_client_options.
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
# NOTE(review): duplicate of the json_format import above — harmless generator
# artifact, left in place because this file is machine-generated (DO NOT EDIT).
from google.protobuf import json_format

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.external_vpn_gateways import ExternalVpnGatewaysClient
from google.cloud.compute_v1.services.external_vpn_gateways import pagers
from google.cloud.compute_v1.services.external_vpn_gateways import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert ExternalVpnGatewaysClient._get_default_mtls_endpoint(None) is None
    assert ExternalVpnGatewaysClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert ExternalVpnGatewaysClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert ExternalVpnGatewaysClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert ExternalVpnGatewaysClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert ExternalVpnGatewaysClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (ExternalVpnGatewaysClient, "rest"),
])
def test_external_vpn_gateways_client_from_service_account_info(client_class, transport_name):
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.ExternalVpnGatewaysRestTransport, "rest"),
])
def test_external_vpn_gateways_client_service_account_always_use_jwt(transport_class, transport_name):
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (ExternalVpnGatewaysClient, "rest"),
])
def test_external_vpn_gateways_client_from_service_account_file(client_class, transport_name):
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_external_vpn_gateways_client_get_transport_class():
    transport = ExternalVpnGatewaysClient.get_transport_class()
    available_transports = [
        transports.ExternalVpnGatewaysRestTransport,
    ]
    assert transport in available_transports

    transport = ExternalVpnGatewaysClient.get_transport_class("rest")
    assert transport == transports.ExternalVpnGatewaysRestTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (ExternalVpnGatewaysClient, transports.ExternalVpnGatewaysRestTransport, "rest"),
])
@mock.patch.object(ExternalVpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ExternalVpnGatewaysClient))
def test_external_vpn_gateways_client_client_options(client_class, transport_class, transport_name):
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(ExternalVpnGatewaysClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(ExternalVpnGatewaysClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
# NOTE(review): reconstructed from a whitespace-mangled diff; span opens inside
# test_external_vpn_gateways_client_client_options and ends at the head of
# test_external_vpn_gateways_client_mtls_env_auto, whose body continues past this view.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_endpoint is provided
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )


@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (ExternalVpnGatewaysClient, transports.ExternalVpnGatewaysRestTransport, "rest", "true"),
    (ExternalVpnGatewaysClient, transports.ExternalVpnGatewaysRestTransport, "rest", "false"),
])
@mock.patch.object(ExternalVpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ExternalVpnGatewaysClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_external_vpn_gateways_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    # This
tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ExternalVpnGatewaysClient +]) +@mock.patch.object(ExternalVpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ExternalVpnGatewaysClient)) +def test_external_vpn_gateways_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ExternalVpnGatewaysClient, transports.ExternalVpnGatewaysRestTransport, "rest"), +]) +def test_external_vpn_gateways_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ExternalVpnGatewaysClient, transports.ExternalVpnGatewaysRestTransport, "rest", None), +]) +def test_external_vpn_gateways_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteExternalVpnGatewayRequest, + dict, +]) +def test_delete_rest(request_type): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'external_vpn_gateway': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteExternalVpnGatewayRequest): + transport_class = transports.ExternalVpnGatewaysRestTransport + + request_init = {} + request_init["external_vpn_gateway"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["externalVpnGateway"] = 
'external_vpn_gateway_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "externalVpnGateway" in jsonified_request + assert jsonified_request["externalVpnGateway"] == 'external_vpn_gateway_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("externalVpnGateway", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(), + ) + client = ExternalVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteExternalVpnGatewayRequest.pb(compute.DeleteExternalVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteExternalVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteExternalVpnGatewayRequest): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'external_vpn_gateway': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'external_vpn_gateway': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + external_vpn_gateway='external_vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteExternalVpnGatewayRequest(), + project='project_value', + external_vpn_gateway='external_vpn_gateway_value', + ) + + +def test_delete_rest_error(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteExternalVpnGatewayRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'external_vpn_gateway': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteExternalVpnGatewayRequest): + transport_class = transports.ExternalVpnGatewaysRestTransport + + request_init = {} + request_init["external_vpn_gateway"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["externalVpnGateway"] = 'external_vpn_gateway_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "externalVpnGateway" in jsonified_request + assert jsonified_request["externalVpnGateway"] == 'external_vpn_gateway_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("externalVpnGateway", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(), + ) + client = ExternalVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteExternalVpnGatewayRequest.pb(compute.DeleteExternalVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteExternalVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteExternalVpnGatewayRequest): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'external_vpn_gateway': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'external_vpn_gateway': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + external_vpn_gateway='external_vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteExternalVpnGatewayRequest(), + project='project_value', + external_vpn_gateway='external_vpn_gateway_value', + ) + + +def test_delete_unary_rest_error(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetExternalVpnGatewayRequest, + dict, +]) +def test_get_rest(request_type): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'external_vpn_gateway': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
        # (body of the mock.patch block opened in the preceding lines)
        return_value = compute.ExternalVpnGateway(
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            id=205,
            kind='kind_value',
            label_fingerprint='label_fingerprint_value',
            name='name_value',
            redundancy_type='redundancy_type_value',
            self_link='self_link_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.ExternalVpnGateway.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.ExternalVpnGateway)
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.id == 205
    assert response.kind == 'kind_value'
    assert response.label_fingerprint == 'label_fingerprint_value'
    assert response.name == 'name_value'
    assert response.redundancy_type == 'redundancy_type_value'
    assert response.self_link == 'self_link_value'


def test_get_rest_required_fields(request_type=compute.GetExternalVpnGatewayRequest):
    """Required fields survive JSON transcoding; default-valued fields are dropped."""
    transport_class = transports.ExternalVpnGatewaysRestTransport

    request_init = {}
    request_init["external_vpn_gateway"] = ""
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["externalVpnGateway"] = 'external_vpn_gateway_value'
    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "externalVpnGateway" in jsonified_request
    assert jsonified_request["externalVpnGateway"] == 'external_vpn_gateway_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.ExternalVpnGateway()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.ExternalVpnGateway.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get(request)

            # get has no default-droppable query params, hence no expected params.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_rest_unset_required_fields():
    """_get_unset_required_fields({}) reports required params not yet supplied."""
    # NOTE(review): the credentials *class* (not an instance) is passed here;
    # this is how the generator emits it — confirm intentional.
    transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("externalVpnGateway", "project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rest_interceptors(null_interceptor):
    """pre_get/post_get interceptors each run exactly once around a REST get()."""
    transport = transports.ExternalVpnGatewaysRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(),
    )
    client = ExternalVpnGatewaysClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_get") as post, \
         mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_get") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.GetExternalVpnGatewayRequest.pb(compute.GetExternalVpnGatewayRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        # (continuation of test_get_rest_interceptors from the preceding lines)
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.ExternalVpnGateway.to_json(compute.ExternalVpnGateway())

        request = compute.GetExternalVpnGatewayRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.ExternalVpnGateway()

        client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetExternalVpnGatewayRequest):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'external_vpn_gateway': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get(request)


def test_get_rest_flattened():
    """get() accepts flattened args and builds the expected REST URL."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.ExternalVpnGateway()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'external_vpn_gateway': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            external_vpn_gateway='external_vpn_gateway_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.ExternalVpnGateway.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}" % client.transport._host, args[1])


def test_get_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get(
            compute.GetExternalVpnGatewayRequest(),
            project='project_value',
            external_vpn_gateway='external_vpn_gateway_value',
        )


def test_get_rest_error():
    """Smoke test: a REST-transport client can be constructed without error."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertExternalVpnGatewayRequest,
    dict,
])
def test_insert_rest(request_type):
    """insert() over REST returns an extended operation with parsed fields."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["external_vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'interfaces': [{'id': 205, 'ip_address': 'ip_address_value'}], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'redundancy_type': 'redundancy_type_value', 'self_link': 'self_link_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        # (body of the mock.patch block opened in the preceding lines)
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_insert_rest_required_fields(request_type=compute.InsertExternalVpnGatewayRequest):
    """Required fields survive JSON transcoding; default-valued fields are dropped."""
    transport_class = transports.ExternalVpnGatewaysRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert(request)

            # insert has no default-droppable query params, hence no expected params.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_rest_unset_required_fields():
    """_get_unset_required_fields({}) reports required params not yet supplied."""
    # NOTE(review): the credentials *class* (not an instance) is passed here;
    # this is how the generator emits it — confirm intentional.
    transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("externalVpnGatewayResource", "project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_rest_interceptors(null_interceptor):
    """pre_insert/post_insert interceptors each run once around a REST insert()."""
    transport = transports.ExternalVpnGatewaysRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(),
    )
    client = ExternalVpnGatewaysClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_insert") as post, \
         mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertExternalVpnGatewayRequest.pb(compute.InsertExternalVpnGatewayRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # (continuation of test_insert_rest_interceptors from the preceding lines)
        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertExternalVpnGatewayRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertExternalVpnGatewayRequest):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["external_vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'interfaces': [{'id': 205, 'ip_address': 'ip_address_value'}], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'redundancy_type': 'redundancy_type_value', 'self_link': 'self_link_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert(request)


def test_insert_rest_flattened():
    """insert() accepts flattened args and builds the expected REST URL."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            external_vpn_gateway_resource=compute.ExternalVpnGateway(creation_timestamp='creation_timestamp_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways" % client.transport._host, args[1])


def test_insert_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert(
            compute.InsertExternalVpnGatewayRequest(),
            project='project_value',
            external_vpn_gateway_resource=compute.ExternalVpnGateway(creation_timestamp='creation_timestamp_value'),
        )


def test_insert_rest_error():
    """Smoke test: a REST-transport client can be constructed without error."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertExternalVpnGatewayRequest,
    dict,
])
def test_insert_unary_rest(request_type):
    """insert_unary() over REST returns the raw compute.Operation."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["external_vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'interfaces': [{'id': 205, 'ip_address': 'ip_address_value'}], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'redundancy_type': 'redundancy_type_value', 'self_link': 'self_link_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        # (body of the mock.patch block opened in the preceding lines)
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_insert_unary_rest_required_fields(request_type=compute.InsertExternalVpnGatewayRequest):
    """Required fields survive JSON transcoding; default-valued fields are dropped."""
    transport_class = transports.ExternalVpnGatewaysRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert_unary(request)

            # insert has no default-droppable query params, hence no expected params.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_unary_rest_unset_required_fields():
    """_get_unset_required_fields({}) reports required params not yet supplied."""
    # NOTE(review): the credentials *class* (not an instance) is passed here;
    # this is how the generator emits it — confirm intentional.
    transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("externalVpnGatewayResource", "project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_unary_rest_interceptors(null_interceptor):
    """pre_insert/post_insert interceptors each run once around insert_unary()."""
    transport = transports.ExternalVpnGatewaysRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(),
    )
    client = ExternalVpnGatewaysClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_insert") as post, \
         mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertExternalVpnGatewayRequest.pb(compute.InsertExternalVpnGatewayRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertExternalVpnGatewayRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertExternalVpnGatewayRequest):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["external_vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'interfaces': [{'id': 205, 'ip_address': 'ip_address_value'}], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'redundancy_type': 'redundancy_type_value', 'self_link': 'self_link_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request)


def test_insert_unary_rest_flattened():
    """insert_unary() accepts flattened args and builds the expected REST URL."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            external_vpn_gateway_resource=compute.ExternalVpnGateway(creation_timestamp='creation_timestamp_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways" % client.transport._host, args[1])


def test_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert_unary(
            compute.InsertExternalVpnGatewayRequest(),
            project='project_value',
            external_vpn_gateway_resource=compute.ExternalVpnGateway(creation_timestamp='creation_timestamp_value'),
        )


def test_insert_unary_rest_error():
    """Smoke test: a REST-transport client can be constructed without error."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.ListExternalVpnGatewaysRequest,
    dict,
])
def test_list_rest(request_type):
    """list() over REST returns a pager over ExternalVpnGatewayList pages."""
    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        # (body of the mock.patch block opened in the preceding lines)
        return_value = compute.ExternalVpnGatewayList(
            etag='etag_value',
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.ExternalVpnGatewayList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPager)
    assert response.etag == 'etag_value'
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'


def test_list_rest_required_fields(request_type=compute.ListExternalVpnGatewaysRequest):
    """Required fields survive JSON transcoding; default-valued fields are dropped."""
    transport_class = transports.ExternalVpnGatewaysRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = ExternalVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.ExternalVpnGatewayList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.ExternalVpnGatewayList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list(request)

            # list has no non-default query params set, hence no expected params.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_rest_unset_required_fields():
    """_get_unset_required_fields({}) reports required params not yet supplied."""
    # NOTE(review): the credentials *class* (not an instance) is passed here;
    # this is how the generator emits it — confirm intentional.
    transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
    """pre_list/post_list interceptors each run exactly once around a REST list()."""
    transport = transports.ExternalVpnGatewaysRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(),
    )
    client = ExternalVpnGatewaysClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_list") as post, \
         mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListExternalVpnGatewaysRequest.pb(compute.ListExternalVpnGatewaysRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ExternalVpnGatewayList.to_json(compute.ExternalVpnGatewayList()) + + request = compute.ListExternalVpnGatewaysRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ExternalVpnGatewayList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListExternalVpnGatewaysRequest): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ExternalVpnGatewayList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ExternalVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListExternalVpnGatewaysRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ExternalVpnGatewayList( + items=[ + compute.ExternalVpnGateway(), + compute.ExternalVpnGateway(), + compute.ExternalVpnGateway(), + ], + next_page_token='abc', + ), + compute.ExternalVpnGatewayList( + items=[], + next_page_token='def', + ), + compute.ExternalVpnGatewayList( + items=[ + compute.ExternalVpnGateway(), + ], + next_page_token='ghi', + ), + compute.ExternalVpnGatewayList( + items=[ + compute.ExternalVpnGateway(), + compute.ExternalVpnGateway(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ExternalVpnGatewayList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.ExternalVpnGateway) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsExternalVpnGatewayRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsExternalVpnGatewayRequest): + transport_class = transports.ExternalVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(), + ) + client = ExternalVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsExternalVpnGatewayRequest.pb(compute.SetLabelsExternalVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsExternalVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsExternalVpnGatewayRequest): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsExternalVpnGatewayRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsExternalVpnGatewayRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsExternalVpnGatewayRequest): + transport_class = transports.ExternalVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(), + ) + client = ExternalVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = compute.SetLabelsExternalVpnGatewayRequest.pb(compute.SetLabelsExternalVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsExternalVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsExternalVpnGatewayRequest): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsExternalVpnGatewayRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsExternalVpnGatewayRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsExternalVpnGatewayRequest): + transport_class = transports.ExternalVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.ExternalVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ExternalVpnGatewaysRestInterceptor(), + ) + client = ExternalVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.ExternalVpnGatewaysRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsExternalVpnGatewayRequest.pb(compute.TestIamPermissionsExternalVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsExternalVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsExternalVpnGatewayRequest): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsExternalVpnGatewayRequest(), + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ExternalVpnGatewaysClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ExternalVpnGatewaysClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ExternalVpnGatewaysClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ExternalVpnGatewaysClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ExternalVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ExternalVpnGatewaysClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ExternalVpnGatewaysRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = ExternalVpnGatewaysClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_external_vpn_gateways_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ExternalVpnGatewaysTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_external_vpn_gateways_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.external_vpn_gateways.transports.ExternalVpnGatewaysTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ExternalVpnGatewaysTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'set_labels', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_external_vpn_gateways_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.external_vpn_gateways.transports.ExternalVpnGatewaysTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ExternalVpnGatewaysTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_external_vpn_gateways_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.external_vpn_gateways.transports.ExternalVpnGatewaysTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ExternalVpnGatewaysTransport() + adc.assert_called_once() + + +def test_external_vpn_gateways_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ExternalVpnGatewaysClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_external_vpn_gateways_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ExternalVpnGatewaysRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_external_vpn_gateways_host_no_port(transport_name): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_external_vpn_gateways_host_with_port(transport_name): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_external_vpn_gateways_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ExternalVpnGatewaysClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ExternalVpnGatewaysClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ExternalVpnGatewaysClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ExternalVpnGatewaysClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ExternalVpnGatewaysClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ExternalVpnGatewaysClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ExternalVpnGatewaysClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ExternalVpnGatewaysClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ExternalVpnGatewaysClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ExternalVpnGatewaysClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ExternalVpnGatewaysClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ExternalVpnGatewaysClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ExternalVpnGatewaysClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ExternalVpnGatewaysClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ExternalVpnGatewaysClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ExternalVpnGatewaysClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ExternalVpnGatewaysClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ExternalVpnGatewaysTransport, '_prep_wrapped_messages') as prep: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ExternalVpnGatewaysTransport, '_prep_wrapped_messages') as prep: + transport_class = ExternalVpnGatewaysClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), 
+ transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ExternalVpnGatewaysClient, transports.ExternalVpnGatewaysRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_firewall_policies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_firewall_policies.py new file mode 100644 index 000000000..8eef64ca0 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_firewall_policies.py @@ -0,0 +1,8023 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.firewall_policies import FirewallPoliciesClient +from google.cloud.compute_v1.services.firewall_policies import pagers +from google.cloud.compute_v1.services.firewall_policies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FirewallPoliciesClient._get_default_mtls_endpoint(None) is None + assert FirewallPoliciesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert FirewallPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert FirewallPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert FirewallPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert FirewallPoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FirewallPoliciesClient, "rest"), +]) +def test_firewall_policies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.FirewallPoliciesRestTransport, "rest"), +]) +def test_firewall_policies_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FirewallPoliciesClient, "rest"), +]) +def test_firewall_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_firewall_policies_client_get_transport_class(): + transport = FirewallPoliciesClient.get_transport_class() + available_transports = [ + transports.FirewallPoliciesRestTransport, + ] + assert transport in available_transports + + transport = FirewallPoliciesClient.get_transport_class("rest") + assert transport == transports.FirewallPoliciesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirewallPoliciesClient, transports.FirewallPoliciesRestTransport, "rest"), +]) 
+@mock.patch.object(FirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirewallPoliciesClient)) +def test_firewall_policies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(FirewallPoliciesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FirewallPoliciesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_audience is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (FirewallPoliciesClient, transports.FirewallPoliciesRestTransport, "rest", "true"), + (FirewallPoliciesClient, transports.FirewallPoliciesRestTransport, "rest", "false"), +]) +@mock.patch.object(FirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirewallPoliciesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_firewall_policies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + FirewallPoliciesClient +]) +@mock.patch.object(FirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirewallPoliciesClient)) +def test_firewall_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirewallPoliciesClient, transports.FirewallPoliciesRestTransport, "rest"), +]) +def test_firewall_policies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (FirewallPoliciesClient, transports.FirewallPoliciesRestTransport, "rest", None), +]) +def test_firewall_policies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddAssociationFirewallPolicyRequest, + dict, +]) +def test_add_association_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 
'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_association_rest_required_fields(request_type=compute.AddAssociationFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' 
+ + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("replace_existing_association", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_association(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_association_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("replaceExistingAssociation", "requestId", )) & set(("firewallPolicy", "firewallPolicyAssociationResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_association_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_add_association") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_add_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddAssociationFirewallPolicyRequest.pb(compute.AddAssociationFirewallPolicyRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddAssociationFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_association_rest_bad_request(transport: str = 'rest', request_type=compute.AddAssociationFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_association(request) + + +def test_add_association_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addAssociation" % client.transport._host, args[1]) + + +def test_add_association_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_association( + compute.AddAssociationFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + + +def test_add_association_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddAssociationFirewallPolicyRequest, + dict, +]) +def test_add_association_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_association_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_association_unary_rest_required_fields(request_type=compute.AddAssociationFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("replace_existing_association", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_association_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_association_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("replaceExistingAssociation", "requestId", )) & set(("firewallPolicy", "firewallPolicyAssociationResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_association_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_add_association") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_add_association") as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddAssociationFirewallPolicyRequest.pb(compute.AddAssociationFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddAssociationFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_association_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_association_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddAssociationFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_association_unary(request) + + +def test_add_association_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_association_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addAssociation" % client.transport._host, args[1]) + + +def test_add_association_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_association_unary( + compute.AddAssociationFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + + +def test_add_association_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddRuleFirewallPolicyRequest, + dict, +]) +def test_add_rule_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': 
['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_rule_rest_required_fields(request_type=compute.AddRuleFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_rule_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_rule_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_add_rule") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_add_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddRuleFirewallPolicyRequest.pb(compute.AddRuleFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddRuleFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_rule_rest_bad_request(transport: str = 'rest', request_type=compute.AddRuleFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 
'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_rule(request) + + +def test_add_rule_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addRule" % client.transport._host, args[1]) + + +def test_add_rule_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_rule( + compute.AddRuleFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_add_rule_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddRuleFirewallPolicyRequest, + dict, +]) +def test_add_rule_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 
'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + 
response = client.add_rule_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_add_rule_unary_rest_required_fields(request_type=compute.AddRuleFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_rule_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_rule_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_add_rule") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_add_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddRuleFirewallPolicyRequest.pb(compute.AddRuleFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddRuleFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddRuleFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 
'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_rule_unary(request) + + +def test_add_rule_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addRule" % client.transport._host, args[1]) + + +def test_add_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_rule_unary( + compute.AddRuleFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_add_rule_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CloneRulesFirewallPolicyRequest, + dict, +]) +def test_clone_rules_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.clone_rules(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_clone_rules_rest_required_fields(request_type=compute.CloneRulesFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_firewall_policy", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.clone_rules(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_clone_rules_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.clone_rules._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceFirewallPolicy", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_clone_rules_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_clone_rules") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_clone_rules") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CloneRulesFirewallPolicyRequest.pb(compute.CloneRulesFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CloneRulesFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.clone_rules(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_clone_rules_rest_bad_request(transport: str = 'rest', request_type=compute.CloneRulesFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.clone_rules(request) + + +def test_clone_rules_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.clone_rules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/cloneRules" % client.transport._host, args[1]) + + +def test_clone_rules_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.clone_rules( + compute.CloneRulesFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_clone_rules_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CloneRulesFirewallPolicyRequest, + dict, +]) +def test_clone_rules_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.clone_rules_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_clone_rules_unary_rest_required_fields(request_type=compute.CloneRulesFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_firewall_policy", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.clone_rules_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_clone_rules_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.clone_rules._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceFirewallPolicy", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_clone_rules_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, 
"post_clone_rules") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_clone_rules") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CloneRulesFirewallPolicyRequest.pb(compute.CloneRulesFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CloneRulesFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.clone_rules_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_clone_rules_unary_rest_bad_request(transport: str = 'rest', request_type=compute.CloneRulesFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.clone_rules_unary(request) + + +def test_clone_rules_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.clone_rules_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/cloneRules" % client.transport._host, args[1]) + + +def test_clone_rules_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.clone_rules_unary( + compute.CloneRulesFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_clone_rules_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteFirewallPolicyRequest, + dict, +]) +def test_delete_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.delete(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteFirewallPolicyRequest.pb(compute.DeleteFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete( + compute.DeleteFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_delete_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteFirewallPolicyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteFirewallPolicyRequest.pb(compute.DeleteFirewallPolicyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_delete_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetFirewallPolicyRequest, + dict, +]) +def test_get_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicy( + creation_timestamp='creation_timestamp_value', + description='description_value', + display_name='display_name_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + parent='parent_value', + region='region_value', + rule_tuple_count=1737, + self_link='self_link_value', + self_link_with_id='self_link_with_id_value', + short_name='short_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.FirewallPolicy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.parent == 'parent_value' + assert response.region == 'region_value' + assert response.rule_tuple_count == 1737 + assert response.self_link == 'self_link_value' + assert response.self_link_with_id == 'self_link_with_id_value' + assert response.short_name == 'short_name_value' + + +def test_get_rest_required_fields(request_type=compute.GetFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetFirewallPolicyRequest.pb(compute.GetFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicy.to_json(compute.FirewallPolicy()) + + request = compute.GetFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_get_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetAssociationFirewallPolicyRequest, + dict, +]) +def test_get_association_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyAssociation( + attachment_target='attachment_target_value', + display_name='display_name_value', + firewall_policy_id='firewall_policy_id_value', + name='name_value', + short_name='short_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.FirewallPolicyAssociation) + assert response.attachment_target == 'attachment_target_value' + assert response.display_name == 'display_name_value' + assert response.firewall_policy_id == 'firewall_policy_id_value' + assert response.name == 'name_value' + assert response.short_name == 'short_name_value' + + +def test_get_association_rest_required_fields(request_type=compute.GetAssociationFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicyAssociation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_association(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_association_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_association_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_get_association") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_get_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetAssociationFirewallPolicyRequest.pb(compute.GetAssociationFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicyAssociation.to_json(compute.FirewallPolicyAssociation()) + + request = compute.GetAssociationFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicyAssociation() + + client.get_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_association_rest_bad_request(transport: str = 'rest', request_type=compute.GetAssociationFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_association(request) + + +def test_get_association_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyAssociation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getAssociation" % client.transport._host, args[1]) + + +def test_get_association_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_association( + compute.GetAssociationFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_get_association_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyFirewallPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_get_iam_policy") 
as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyFirewallPolicyRequest.pb(compute.GetIamPolicyFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyFirewallPolicyRequest(), + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRuleFirewallPolicyRequest, + dict, +]) +def test_get_rule_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyRule( + action='action_value', + description='description_value', + direction='direction_value', + disabled=True, + enable_logging=True, + kind='kind_value', + priority=898, + rule_name='rule_name_value', + rule_tuple_count=1737, + target_resources=['target_resources_value'], + target_service_accounts=['target_service_accounts_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.FirewallPolicyRule) + assert response.action == 'action_value' + assert response.description == 'description_value' + assert response.direction == 'direction_value' + assert response.disabled is True + assert response.enable_logging is True + assert response.kind == 'kind_value' + assert response.priority == 898 + assert response.rule_name == 'rule_name_value' + assert response.rule_tuple_count == 1737 + assert response.target_resources == ['target_resources_value'] + assert response.target_service_accounts == ['target_service_accounts_value'] + + +def test_get_rule_rest_required_fields(request_type=compute.GetRuleFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("priority", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rule_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rule_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_get_rule") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_get_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRuleFirewallPolicyRequest.pb(compute.GetRuleFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicyRule.to_json(compute.FirewallPolicyRule()) + + request = compute.GetRuleFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicyRule() + + client.get_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rule_rest_bad_request(transport: str = 'rest', request_type=compute.GetRuleFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rule(request) + + +def test_get_rule_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicyRule() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getRule" % client.transport._host, args[1]) + + +def test_get_rule_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_rule( + compute.GetRuleFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_get_rule_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertFirewallPolicyRequest, + dict, +]) +def test_insert_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': 
['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["parent_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "parentId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + 
# verify required fields with default values are now present + assert "parentId" in jsonified_request + assert jsonified_request["parentId"] == request_init["parent_id"] + + jsonified_request["parentId"] = 'parent_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parentId" in jsonified_request + assert jsonified_request["parentId"] == 'parent_id_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ( + "parentId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parentId", "requestId", )) & set(("firewallPolicyResource", "parentId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertFirewallPolicyRequest.pb(compute.InsertFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': 
[{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+        # Dummy Operation; only used to give the mocked HTTP call a valid body.
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent_id='parent_id_value',
+            firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.insert(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        # The flattened args must transcode to the global firewallPolicies URI.
+        assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies" % client.transport._host, args[1])
+
+
+def test_insert_rest_flattened_error(transport: str = 'rest'):
+    # Mixing a request object with flattened fields is rejected by the
+    # client (the ValueError assertion follows on the next lines).
+    client = FirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.insert( + compute.InsertFirewallPolicyRequest(), + parent_id='parent_id_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_insert_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertFirewallPolicyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': 
['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+        # Operation with every scalar field populated so the proto -> JSON
+        # -> proto round trip below exercises each field at least once.
+        return_value = compute.Operation(
+            client_operation_id='client_operation_id_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            end_time='end_time_value',
+            http_error_message='http_error_message_value',
+            http_error_status_code=2374,
+            id=205,
+            insert_time='insert_time_value',
+            kind='kind_value',
+            name='name_value',
+            operation_group_id='operation_group_id_value',
+            operation_type='operation_type_value',
+            progress=885,
+            region='region_value',
+            self_link='self_link_value',
+            start_time='start_time_value',
+            status=compute.Operation.Status.DONE,
+            status_message='status_message_value',
+            target_id=947,
+            target_link='target_link_value',
+            user='user_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        # The unary variant is exercised here; the assertion that follows
+        # checks the raw compute.Operation is returned (no ExtendedOperation).
+        response = client.insert_unary(request)
+
+        # Establish that the response is the type that we expect.
+    assert isinstance(response, compute.Operation)
+
+
+def test_insert_unary_rest_required_fields(request_type=compute.InsertFirewallPolicyRequest):
+    # Verify transport-level handling of the required `parent_id` field:
+    # default values are dropped from the JSON form of the request,
+    # re-added by _get_unset_required_fields, and (per the continuation of
+    # this test on the following lines) surfaced as query params.
+    transport_class = transports.FirewallPoliciesRestTransport
+
+    request_init = {}
+    request_init["parent_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+    assert "parentId" not in jsonified_request
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+    assert "parentId" in jsonified_request
+    assert jsonified_request["parentId"] == request_init["parent_id"]
+
+    jsonified_request["parentId"] = 'parent_id_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("parent_id", "request_id", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parentId" in jsonified_request
+    assert jsonified_request["parentId"] == 'parent_id_value'
+
+    client = FirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ( + "parentId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parentId", "requestId", )) & set(("firewallPolicyResource", "parentId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertFirewallPolicyRequest.pb(compute.InsertFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 
'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + parent_id='parent_id_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertFirewallPolicyRequest(), + parent_id='parent_id_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_insert_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListFirewallPoliciesRequest, + dict, +]) +def test_list_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+        # Paged list response; only the top-level scalar fields are set.
+        return_value = compute.FirewallPolicyList(
+            id='id_value',
+            kind='kind_value',
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.FirewallPolicyList.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.list(request)
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers.ListPager)
+        assert response.id == 'id_value'
+        assert response.kind == 'kind_value'
+        assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_rest_interceptors(null_interceptor):
+    # Exercise the pre/post interceptor hooks around list(); parametrized
+    # so the no-interceptor transport configuration is covered too.
+    transport = transports.FirewallPoliciesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(),
+    )
+    client = FirewallPoliciesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_list") as post, \
+        mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_list") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.ListFirewallPoliciesRequest.pb(compute.ListFirewallPoliciesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.FirewallPolicyList.to_json(compute.FirewallPolicyList())
+
+        request = compute.ListFirewallPoliciesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        # The pre-interceptor must be able to rewrite request and metadata.
+        pre.return_value = request, metadata
+        post.return_value = compute.FirewallPolicyList()
+
+        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListFirewallPoliciesRequest):
+    # An HTTP 400 from the session must surface as core_exceptions.BadRequest.
+    client = FirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list(request)
+
+
+def test_list_rest_pager(transport: str = 'rest'):
+    client = FirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+        #with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        # Four pages holding 3, 0, 1 and 2 items respectively; the whole
+        # sequence is served twice (doubled below) because the test iterates
+        # the pager once for results and once more for pages.
+        response = (
+            compute.FirewallPolicyList(
+                items=[
+                    compute.FirewallPolicy(),
+                    compute.FirewallPolicy(),
+                    compute.FirewallPolicy(),
+                ],
+                next_page_token='abc',
+            ),
+            compute.FirewallPolicyList(
+                items=[],
+                next_page_token='def',
+            ),
+            compute.FirewallPolicyList(
+                items=[
+                    compute.FirewallPolicy(),
+                ],
+                next_page_token='ghi',
+            ),
+            compute.FirewallPolicyList(
+                items=[
+                    compute.FirewallPolicy(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(compute.FirewallPolicyList.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {}
+
+        pager = client.list(request=sample_request)
+
+        # 3 + 0 + 1 + 2 items across the four pages.
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, compute.FirewallPolicy)
+                   for i in results)
+
+        # The final page carries no token, hence the trailing ''.
+        pages = list(client.list(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.ListAssociationsFirewallPolicyRequest,
+    dict,
+])
+def test_list_associations_rest(request_type):
+    client = FirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.FirewallPoliciesListAssociationsResponse( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPoliciesListAssociationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_associations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.FirewallPoliciesListAssociationsResponse) + assert response.kind == 'kind_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_associations_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_list_associations") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_list_associations") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListAssociationsFirewallPolicyRequest.pb(compute.ListAssociationsFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPoliciesListAssociationsResponse.to_json(compute.FirewallPoliciesListAssociationsResponse()) + + request = 
compute.ListAssociationsFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPoliciesListAssociationsResponse() + + client.list_associations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_associations_rest_bad_request(transport: str = 'rest', request_type=compute.ListAssociationsFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_associations(request) + + +def test_list_associations_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.MoveFirewallPolicyRequest, + dict, +]) +def test_move_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+        # Operation with every scalar field populated so the proto -> JSON
+        # -> proto round trip below exercises each field at least once.
+        return_value = compute.Operation(
+            client_operation_id='client_operation_id_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            end_time='end_time_value',
+            http_error_message='http_error_message_value',
+            http_error_status_code=2374,
+            id=205,
+            insert_time='insert_time_value',
+            kind='kind_value',
+            name='name_value',
+            operation_group_id='operation_group_id_value',
+            operation_type='operation_type_value',
+            progress=885,
+            region='region_value',
+            self_link='self_link_value',
+            start_time='start_time_value',
+            status=compute.Operation.Status.DONE,
+            status_message='status_message_value',
+            target_id=947,
+            target_link='target_link_value',
+            user='user_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        # move() wraps the Operation — the following assertion checks for
+        # extended_operation.ExtendedOperation, unlike the unary variant.
+        response = client.move(request)
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_move_rest_required_fields(request_type=compute.MoveFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["parent_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "parentId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "parentId" 
in jsonified_request + assert jsonified_request["parentId"] == request_init["parent_id"] + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["parentId"] = 'parent_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "parentId" in jsonified_request + assert jsonified_request["parentId"] == 'parent_id_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move(request) + + expected_params = [ + ( + "parentId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.move._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parentId", "requestId", )) & set(("firewallPolicy", "parentId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_move") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_move") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveFirewallPolicyRequest.pb(compute.MoveFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_rest_bad_request(transport: str = 'rest', request_type=compute.MoveFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move(request) + + +def test_move_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + parent_id='parent_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/move" % client.transport._host, args[1]) + + +def test_move_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move( + compute.MoveFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + parent_id='parent_id_value', + ) + + +def test_move_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.MoveFirewallPolicyRequest, + dict, +]) +def test_move_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.move_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_move_unary_rest_required_fields(request_type=compute.MoveFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["parent_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "parentId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "parentId" in jsonified_request + assert jsonified_request["parentId"] == request_init["parent_id"] + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["parentId"] = 'parent_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("parent_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "parentId" in jsonified_request + assert jsonified_request["parentId"] == 'parent_id_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move_unary(request) + + expected_params = [ + ( + "parentId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.move._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parentId", "requestId", )) & set(("firewallPolicy", "parentId", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_move") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_move") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveFirewallPolicyRequest.pb(compute.MoveFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_unary_rest_bad_request(transport: str = 'rest', request_type=compute.MoveFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_unary(request) + + +def test_move_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + parent_id='parent_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/move" % client.transport._host, args[1]) + + +def test_move_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move_unary( + compute.MoveFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + parent_id='parent_id_value', + ) + + +def test_move_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchFirewallPolicyRequest, + dict, +]) +def test_patch_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': 
['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content 
= json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "firewallPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchFirewallPolicyRequest.pb(compute.PatchFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 
'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_patch_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchFirewallPolicyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 
'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "firewallPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.PatchFirewallPolicyRequest.pb(compute.PatchFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 
'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_patch_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRuleFirewallPolicyRequest, + dict, +]) +def test_patch_rule_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 
'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rule_rest_required_fields(request_type=compute.PatchRuleFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rule_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rule_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_patch_rule") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_patch_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRuleFirewallPolicyRequest.pb(compute.PatchRuleFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRuleFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rule_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRuleFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': 
['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule(request) + + +def test_patch_rule_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/patchRule" % client.transport._host, args[1]) + + +def test_patch_rule_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_rule( + compute.PatchRuleFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_patch_rule_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRuleFirewallPolicyRequest, + dict, +]) +def test_patch_rule_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': 
['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.patch_rule_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_rule_unary_rest_required_fields(request_type=compute.PatchRuleFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rule_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rule_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_patch_rule") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_patch_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRuleFirewallPolicyRequest.pb(compute.PatchRuleFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRuleFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRuleFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 
'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule_unary(request) + + +def test_patch_rule_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/patchRule" % client.transport._host, args[1]) + + +def test_patch_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_rule_unary( + compute.PatchRuleFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_patch_rule_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveAssociationFirewallPolicyRequest, + dict, +]) +def test_remove_association_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_association_rest_required_fields(request_type=compute.RemoveAssociationFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 
'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_association(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_association_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", "requestId", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_association_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_remove_association") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_remove_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveAssociationFirewallPolicyRequest.pb(compute.RemoveAssociationFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveAssociationFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_association_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveAssociationFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_association(request) + + +def test_remove_association_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeAssociation" % client.transport._host, args[1]) + + +def test_remove_association_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_association( + compute.RemoveAssociationFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_remove_association_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveAssociationFirewallPolicyRequest, + dict, +]) +def test_remove_association_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.remove_association_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_remove_association_unary_rest_required_fields(request_type=compute.RemoveAssociationFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_association_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_association_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", "requestId", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_association_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_remove_association") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_remove_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveAssociationFirewallPolicyRequest.pb(compute.RemoveAssociationFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveAssociationFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_association_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_association_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveAssociationFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_association_unary(request) + + +def test_remove_association_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_association_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeAssociation" % client.transport._host, args[1]) + + +def test_remove_association_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_association_unary( + compute.RemoveAssociationFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_remove_association_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveRuleFirewallPolicyRequest, + dict, +]) +def test_remove_rule_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.remove_rule(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_rule_rest_required_fields(request_type=compute.RemoveRuleFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_rule_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_rule_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_remove_rule") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_remove_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveRuleFirewallPolicyRequest.pb(compute.RemoveRuleFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveRuleFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_rule_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveRuleFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule(request) + + +def test_remove_rule_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeRule" % client.transport._host, args[1]) + + +def test_remove_rule_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_rule( + compute.RemoveRuleFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_remove_rule_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveRuleFirewallPolicyRequest, + dict, +]) +def test_remove_rule_unary_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.remove_rule_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_remove_rule_unary_rest_required_fields(request_type=compute.RemoveRuleFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_rule_unary_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_rule_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, 
"post_remove_rule") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_remove_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveRuleFirewallPolicyRequest.pb(compute.RemoveRuleFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveRuleFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveRuleFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'firewall_policy': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule_unary(request) + + +def test_remove_rule_unary_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'firewall_policy': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeRule" % client.transport._host, args[1]) + + +def test_remove_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_rule_unary( + compute.RemoveRuleFirewallPolicyRequest(), + firewall_policy='firewall_policy_value', + ) + + +def test_remove_rule_unary_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyFirewallPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1'} + request_init["global_organization_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 
'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalOrganizationSetPolicyRequestResource", "resource", ))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyFirewallPolicyRequest.pb(compute.SetIamPolicyFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1'} + request_init["global_organization_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': 
{'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + global_organization_set_policy_request_resource=compute.GlobalOrganizationSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyFirewallPolicyRequest(), + resource='resource_value', + global_organization_set_policy_request_resource=compute.GlobalOrganizationSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsFirewallPolicyRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsFirewallPolicyRequest): + transport_class = transports.FirewallPoliciesRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.FirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallPoliciesRestInterceptor(), + ) + client = FirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.FirewallPoliciesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.TestIamPermissionsFirewallPolicyRequest.pb(compute.TestIamPermissionsFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsFirewallPolicyRequest): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/firewallPolicies/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsFirewallPolicyRequest(), + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallPoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirewallPoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirewallPoliciesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallPoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FirewallPoliciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.FirewallPoliciesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = FirewallPoliciesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_firewall_policies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FirewallPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_firewall_policies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.firewall_policies.transports.FirewallPoliciesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.FirewallPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_association', + 'add_rule', + 'clone_rules', + 'delete', + 'get', + 'get_association', + 'get_iam_policy', + 'get_rule', + 'insert', + 'list', + 'list_associations', + 'move', + 'patch', + 'patch_rule', + 'remove_association', + 'remove_rule', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_firewall_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.firewall_policies.transports.FirewallPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirewallPoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_firewall_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.firewall_policies.transports.FirewallPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirewallPoliciesTransport() + adc.assert_called_once() + + +def test_firewall_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FirewallPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_firewall_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.FirewallPoliciesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_firewall_policies_host_no_port(transport_name): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_firewall_policies_host_with_port(transport_name): + client = FirewallPoliciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_firewall_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FirewallPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FirewallPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_association._session + session2 = client2.transport.add_association._session + assert session1 != session2 + session1 = client1.transport.add_rule._session + session2 = client2.transport.add_rule._session + assert session1 != session2 + session1 = client1.transport.clone_rules._session + session2 = client2.transport.clone_rules._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_association._session + session2 = client2.transport.get_association._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.get_rule._session + session2 = client2.transport.get_rule._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = 
client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_associations._session + session2 = client2.transport.list_associations._session + assert session1 != session2 + session1 = client1.transport.move._session + session2 = client2.transport.move._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.patch_rule._session + session2 = client2.transport.patch_rule._session + assert session1 != session2 + session1 = client1.transport.remove_association._session + session2 = client2.transport.remove_association._session + assert session1 != session2 + session1 = client1.transport.remove_rule._session + session2 = client2.transport.remove_rule._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = FirewallPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = FirewallPoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirewallPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = FirewallPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = FirewallPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallPoliciesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = FirewallPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = FirewallPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallPoliciesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = FirewallPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = FirewallPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirewallPoliciesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = FirewallPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = FirewallPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.FirewallPoliciesTransport, '_prep_wrapped_messages') as prep: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.FirewallPoliciesTransport, '_prep_wrapped_messages') as prep: + transport_class = FirewallPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # 
Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (FirewallPoliciesClient, transports.FirewallPoliciesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_firewalls.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_firewalls.py new file mode 100644 index 000000000..de5895366 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_firewalls.py @@ -0,0 +1,3601 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.firewalls import FirewallsClient +from google.cloud.compute_v1.services.firewalls import pagers +from google.cloud.compute_v1.services.firewalls import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FirewallsClient._get_default_mtls_endpoint(None) is None + assert FirewallsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert FirewallsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert FirewallsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert FirewallsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert FirewallsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FirewallsClient, "rest"), +]) +def test_firewalls_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.FirewallsRestTransport, "rest"), +]) +def test_firewalls_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds 
= service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FirewallsClient, "rest"), +]) +def test_firewalls_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_firewalls_client_get_transport_class(): + transport = FirewallsClient.get_transport_class() + available_transports = [ + transports.FirewallsRestTransport, + ] + assert transport in available_transports + + transport = FirewallsClient.get_transport_class("rest") + assert transport == transports.FirewallsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirewallsClient, transports.FirewallsRestTransport, "rest"), +]) +@mock.patch.object(FirewallsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirewallsClient)) +def test_firewalls_client_client_options(client_class, transport_class, 
transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(FirewallsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FirewallsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (FirewallsClient, transports.FirewallsRestTransport, "rest", "true"), + (FirewallsClient, transports.FirewallsRestTransport, "rest", "false"), +]) +@mock.patch.object(FirewallsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirewallsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_firewalls_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + FirewallsClient +]) +@mock.patch.object(FirewallsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FirewallsClient)) +def test_firewalls_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FirewallsClient, transports.FirewallsRestTransport, "rest"), +]) +def test_firewalls_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (FirewallsClient, transports.FirewallsRestTransport, "rest", None), +]) +def test_firewalls_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteFirewallRequest, + dict, +]) +def test_delete_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["firewall"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewall"] = 'firewall_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewall" in jsonified_request + assert jsonified_request["firewall"] == 'firewall_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewall", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.FirewallsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteFirewallRequest.pb(compute.DeleteFirewallRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall='firewall_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls/{firewall}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteFirewallRequest(), + project='project_value', + firewall='firewall_value', + ) + + +def test_delete_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteFirewallRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["firewall"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewall"] = 'firewall_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewall" in jsonified_request + assert jsonified_request["firewall"] == 'firewall_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewall", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.FirewallsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteFirewallRequest.pb(compute.DeleteFirewallRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall='firewall_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls/{firewall}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteFirewallRequest(), + project='project_value', + firewall='firewall_value', + ) + + +def test_delete_unary_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetFirewallRequest, + dict, +]) +def test_get_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Firewall( + creation_timestamp='creation_timestamp_value', + description='description_value', + destination_ranges=['destination_ranges_value'], + direction='direction_value', + disabled=True, + id=205, + kind='kind_value', + name='name_value', + network='network_value', + priority=898, + self_link='self_link_value', + source_ranges=['source_ranges_value'], + source_service_accounts=['source_service_accounts_value'], + source_tags=['source_tags_value'], + target_service_accounts=['target_service_accounts_value'], + target_tags=['target_tags_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Firewall.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Firewall) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.destination_ranges == ['destination_ranges_value'] + assert response.direction == 'direction_value' + assert response.disabled is True + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.priority == 898 + assert response.self_link == 'self_link_value' + assert response.source_ranges == ['source_ranges_value'] + assert response.source_service_accounts == ['source_service_accounts_value'] + assert response.source_tags == ['source_tags_value'] + assert response.target_service_accounts == ['target_service_accounts_value'] + assert response.target_tags == ['target_tags_value'] + + +def test_get_rest_required_fields(request_type=compute.GetFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["firewall"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewall"] = 'firewall_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewall" in 
jsonified_request + assert jsonified_request["firewall"] == 'firewall_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Firewall() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Firewall.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("firewall", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.FirewallsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetFirewallRequest.pb(compute.GetFirewallRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Firewall.to_json(compute.Firewall()) + + request = compute.GetFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Firewall() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Firewall() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall='firewall_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Firewall.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls/{firewall}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetFirewallRequest(), + project='project_value', + firewall='firewall_value', + ) + + +def test_get_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertFirewallRequest, + dict, +]) +def test_insert_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.FirewallsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertFirewallRequest.pb(compute.InsertFirewallRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
    # NOTE(review): this `with` block is the tail of test_insert_rest_bad_request,
    # whose `def` line sits above this chunk. It makes the mocked session return
    # HTTP 400 so the client surfaces core_exceptions.BadRequest.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert(request)


def test_insert_rest_flattened():
    """Verify that flattened kwargs (project=..., firewall_resource=...) are
    accepted by ``client.insert`` and transcoded onto the expected REST URI.
    """
    client = FirewallsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # args[1] is the URI actually requested; validate it against the
        # literal http rule for Firewalls.Insert.
        assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls" % client.transport._host, args[1])


def test_insert_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError.

    NOTE(review): the chunk boundary falls inside this test; the
    ``with pytest.raises`` body continues on the next wrapped line of the file.
    """
    client = FirewallsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.insert( + compute.InsertFirewallRequest(), + project='project_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + + +def test_insert_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertFirewallRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.FirewallsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertFirewallRequest.pb(compute.InsertFirewallRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertFirewallRequest(), + project='project_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + + +def test_insert_unary_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListFirewallsRequest, + dict, +]) +def test_list_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListFirewallsRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_list") as post, \ + 
mock.patch.object(transports.FirewallsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListFirewallsRequest.pb(compute.ListFirewallsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallList.to_json(compute.FirewallList()) + + request = compute.ListFirewallsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListFirewallsRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListFirewallsRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.FirewallList( + items=[ + compute.Firewall(), + compute.Firewall(), + compute.Firewall(), + ], + next_page_token='abc', + ), + compute.FirewallList( + items=[], + next_page_token='def', + ), + compute.FirewallList( + items=[ + compute.Firewall(), + ], + next_page_token='ghi', + ), + compute.FirewallList( + items=[ + compute.Firewall(), + compute.Firewall(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.FirewallList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Firewall) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchFirewallRequest, + dict, +]) +def test_patch_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': 
['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["firewall"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
default values are now present + + jsonified_request["firewall"] = 'firewall_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewall" in jsonified_request + assert jsonified_request["firewall"] == 'firewall_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewall", "firewallResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.FirewallsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchFirewallRequest.pb(compute.PatchFirewallRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall='firewall_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls/{firewall}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchFirewallRequest(), + project='project_value', + firewall='firewall_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + + +def test_patch_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchFirewallRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 
'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["firewall"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewall"] = 'firewall_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewall" in jsonified_request + assert jsonified_request["firewall"] == 'firewall_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewall", "firewallResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_patch") as 
post, \ + mock.patch.object(transports.FirewallsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchFirewallRequest.pb(compute.PatchFirewallRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 
'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall='firewall_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls/{firewall}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchFirewallRequest(), + project='project_value', + firewall='firewall_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + + +def test_patch_unary_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateFirewallRequest, + dict, +]) +def test_update_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 
'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["firewall"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewall"] = 'firewall_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewall" in jsonified_request + assert jsonified_request["firewall"] == 'firewall_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewall", "firewallResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.FirewallsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateFirewallRequest.pb(compute.UpdateFirewallRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall='firewall_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls/{firewall}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update( + compute.UpdateFirewallRequest(), + project='project_value', + firewall='firewall_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + + +def test_update_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateFirewallRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 
'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateFirewallRequest): + transport_class = transports.FirewallsRestTransport + + request_init = {} + request_init["firewall"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewall"] = 'firewall_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewall" in jsonified_request + assert jsonified_request["firewall"] == 'firewall_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.FirewallsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewall", "firewallResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirewallsRestInterceptor(), + ) + client = FirewallsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.FirewallsRestInterceptor, "post_update") as 
post, \ + mock.patch.object(transports.FirewallsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateFirewallRequest.pb(compute.UpdateFirewallRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateFirewallRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateFirewallRequest): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall': 'sample2'} + request_init["firewall_resource"] = {'allowed': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'creation_timestamp': 'creation_timestamp_value', 'denied': [{'I_p_protocol': 'I_p_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'description': 'description_value', 'destination_ranges': ['destination_ranges_value1', 'destination_ranges_value2'], 'direction': 'direction_value', 'disabled': True, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True, 'metadata': 'metadata_value'}, 'name': 'name_value', 'network': 'network_value', 'priority': 898, 'self_link': 'self_link_value', 'source_ranges': ['source_ranges_value1', 'source_ranges_value2'], 'source_service_accounts': ['source_service_accounts_value1', 
'source_service_accounts_value2'], 'source_tags': ['source_tags_value1', 'source_tags_value2'], 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2'], 'target_tags': ['target_tags_value1', 'target_tags_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall='firewall_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewalls/{firewall}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_unary( + compute.UpdateFirewallRequest(), + project='project_value', + firewall='firewall_value', + firewall_resource=compute.Firewall(allowed=[compute.Allowed(I_p_protocol='I_p_protocol_value')]), + ) + + +def test_update_unary_rest_error(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirewallsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FirewallsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FirewallsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FirewallsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FirewallsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.FirewallsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = FirewallsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_firewalls_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FirewallsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_firewalls_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.compute_v1.services.firewalls.transports.FirewallsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.FirewallsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_firewalls_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.firewalls.transports.FirewallsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirewallsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_firewalls_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.firewalls.transports.FirewallsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirewallsTransport() + adc.assert_called_once() + + +def test_firewalls_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FirewallsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_firewalls_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.FirewallsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_firewalls_host_no_port(transport_name): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_firewalls_host_with_port(transport_name): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + 
client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_firewalls_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FirewallsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FirewallsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = FirewallsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = FirewallsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirewallsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = FirewallsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = FirewallsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = FirewallsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = FirewallsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = FirewallsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = FirewallsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FirewallsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = FirewallsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = FirewallsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = FirewallsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.FirewallsTransport, '_prep_wrapped_messages') as prep: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.FirewallsTransport, '_prep_wrapped_messages') as prep: + transport_class = FirewallsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (FirewallsClient, transports.FirewallsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_forwarding_rules.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_forwarding_rules.py new file mode 100644 index 000000000..98438b1fb --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_forwarding_rules.py @@ -0,0 +1,4570 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import os

# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import json
import math
from collections.abc import Iterable

import grpc
from grpc.experimental import aio
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
# NOTE: the generated file imported `json_format` twice; the duplicate
# `from google.protobuf import json_format` line has been removed.
from google.protobuf import json_format

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.forwarding_rules import ForwardingRulesClient
from google.cloud.compute_v1.services.forwarding_rules import pagers
from google.cloud.compute_v1.services.forwarding_rules import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    """Return a dummy ``(cert_bytes, key_bytes)`` pair for mTLS client-cert tests."""
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ForwardingRulesClient._get_default_mtls_endpoint(None) is None + assert ForwardingRulesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ForwardingRulesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ForwardingRulesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ForwardingRulesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ForwardingRulesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ForwardingRulesClient, "rest"), +]) +def test_forwarding_rules_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ForwardingRulesRestTransport, "rest"), +]) +def test_forwarding_rules_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ForwardingRulesClient, "rest"), +]) +def test_forwarding_rules_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_forwarding_rules_client_get_transport_class(): + transport = ForwardingRulesClient.get_transport_class() + available_transports = [ + transports.ForwardingRulesRestTransport, + ] + assert transport in available_transports + + transport = ForwardingRulesClient.get_transport_class("rest") + assert transport == transports.ForwardingRulesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ForwardingRulesClient, transports.ForwardingRulesRestTransport, "rest"), +]) +@mock.patch.object(ForwardingRulesClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(ForwardingRulesClient)) +def test_forwarding_rules_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ForwardingRulesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ForwardingRulesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ForwardingRulesClient, transports.ForwardingRulesRestTransport, "rest", "true"), + (ForwardingRulesClient, transports.ForwardingRulesRestTransport, "rest", "false"), +]) +@mock.patch.object(ForwardingRulesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ForwardingRulesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_forwarding_rules_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ForwardingRulesClient +]) +@mock.patch.object(ForwardingRulesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ForwardingRulesClient)) +def test_forwarding_rules_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ForwardingRulesClient, transports.ForwardingRulesRestTransport, "rest"), +]) +def test_forwarding_rules_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ForwardingRulesClient, transports.ForwardingRulesRestTransport, "rest", None), +]) +def test_forwarding_rules_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListForwardingRulesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRuleAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRuleAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListForwardingRulesRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRuleAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ForwardingRuleAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListForwardingRulesRequest.pb(compute.AggregatedListForwardingRulesRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ForwardingRuleAggregatedList.to_json(compute.ForwardingRuleAggregatedList()) + + request = compute.AggregatedListForwardingRulesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ForwardingRuleAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListForwardingRulesRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ForwardingRuleAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRuleAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/forwardingRules" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListForwardingRulesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ForwardingRuleAggregatedList( + items={ + 'a':compute.ForwardingRulesScopedList(), + 'b':compute.ForwardingRulesScopedList(), + 'c':compute.ForwardingRulesScopedList(), + }, + next_page_token='abc', + ), + compute.ForwardingRuleAggregatedList( + items={}, + next_page_token='def', + ), + compute.ForwardingRuleAggregatedList( + items={ + 'g':compute.ForwardingRulesScopedList(), + }, + next_page_token='ghi', + ), + compute.ForwardingRuleAggregatedList( + items={ + 'h':compute.ForwardingRulesScopedList(), + 'i':compute.ForwardingRulesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ForwardingRuleAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.ForwardingRulesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.ForwardingRulesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.ForwardingRulesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteForwardingRuleRequest, + dict, +]) +def 
test_delete_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteForwardingRuleRequest.pb(compute.DeleteForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + ) + + +def test_delete_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteForwardingRuleRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteForwardingRuleRequest.pb(compute.DeleteForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + ) + + +def test_delete_unary_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetForwardingRuleRequest, + dict, +]) +def test_get_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRule( + I_p_address='I_p_address_value', + I_p_protocol='I_p_protocol_value', + all_ports=True, + allow_global_access=True, + allow_psc_global_access=True, + backend_service='backend_service_value', + base_forwarding_rule='base_forwarding_rule_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + ip_version='ip_version_value', + is_mirroring_collector=True, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + load_balancing_scheme='load_balancing_scheme_value', + name='name_value', + network='network_value', + network_tier='network_tier_value', + no_automate_dns_zone=True, + port_range='port_range_value', + ports=['ports_value'], + psc_connection_id=1793, + psc_connection_status='psc_connection_status_value', + region='region_value', + self_link='self_link_value', + service_label='service_label_value', + service_name='service_name_value', + 
source_ip_ranges=['source_ip_ranges_value'], + subnetwork='subnetwork_value', + target='target_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.ForwardingRule) + assert response.I_p_address == 'I_p_address_value' + assert response.I_p_protocol == 'I_p_protocol_value' + assert response.all_ports is True + assert response.allow_global_access is True + assert response.allow_psc_global_access is True + assert response.backend_service == 'backend_service_value' + assert response.base_forwarding_rule == 'base_forwarding_rule_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.ip_version == 'ip_version_value' + assert response.is_mirroring_collector is True + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.load_balancing_scheme == 'load_balancing_scheme_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.network_tier == 'network_tier_value' + assert response.no_automate_dns_zone is True + assert response.port_range == 'port_range_value' + assert response.ports == ['ports_value'] + assert response.psc_connection_id == 1793 + assert response.psc_connection_status == 'psc_connection_status_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.service_label == 'service_label_value' + assert 
response.service_name == 'service_name_value' + assert response.source_ip_ranges == ['source_ip_ranges_value'] + assert response.subnetwork == 'subnetwork_value' + assert response.target == 'target_value' + + +def test_get_rest_required_fields(request_type=compute.GetForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.ForwardingRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("forwardingRule", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetForwardingRuleRequest.pb(compute.GetForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ForwardingRule.to_json(compute.ForwardingRule()) + + request = compute.GetForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ForwardingRule() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRule() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + ) + + +def test_get_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertForwardingRuleRequest, + dict, +]) +def test_insert_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 
'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRuleResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertForwardingRuleRequest.pb(compute.InsertForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 
'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + + +def test_insert_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertForwardingRuleRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 
'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRuleResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertForwardingRuleRequest.pb(compute.InsertForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': 
[{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + + +def test_insert_unary_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListForwardingRulesRequest, + dict, +]) +def test_list_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRuleList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListForwardingRulesRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.ForwardingRuleList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListForwardingRulesRequest.pb(compute.ListForwardingRulesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ForwardingRuleList.to_json(compute.ForwardingRuleList()) + + request = compute.ListForwardingRulesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ForwardingRuleList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListForwardingRulesRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRuleList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListForwardingRulesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ForwardingRuleList( + items=[ + compute.ForwardingRule(), + compute.ForwardingRule(), + compute.ForwardingRule(), + ], + next_page_token='abc', + ), + compute.ForwardingRuleList( + items=[], + next_page_token='def', + ), + compute.ForwardingRuleList( + items=[ + compute.ForwardingRule(), + ], + next_page_token='ghi', + ), + compute.ForwardingRuleList( + items=[ + compute.ForwardingRule(), + compute.ForwardingRule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ForwardingRuleList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.ForwardingRule) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchForwardingRuleRequest, + 
dict, +]) +def test_patch_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "forwardingRuleResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchForwardingRuleRequest.pb(compute.PatchForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value 
= Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 
'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + + +def test_patch_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchForwardingRuleRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 
'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "forwardingRuleResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchForwardingRuleRequest.pb(compute.PatchForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 
'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + + +def test_patch_unary_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsForwardingRuleRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsForwardingRuleRequest.pb(compute.SetLabelsForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsForwardingRuleRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsForwardingRuleRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsForwardingRuleRequest.pb(compute.SetLabelsForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsForwardingRuleRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTargetForwardingRuleRequest, + dict, +]) +def test_set_target_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request_init["target_reference_resource"] = {'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_target(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_target_rest_required_fields(request_type=compute.SetTargetForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_target(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_target_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.set_target._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "project", "region", "targetReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_target_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_set_target") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_set_target") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetTargetForwardingRuleRequest.pb(compute.SetTargetForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, +
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTargetForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_target(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_target_rest_bad_request(transport: str = 'rest', request_type=compute.SetTargetForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request_init["target_reference_resource"] = {'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target(request) + + +def test_set_target_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + target_reference_resource=compute.TargetReference(target='target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_target(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}/setTarget" % client.transport._host, args[1]) + + +def test_set_target_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_target( + compute.SetTargetForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + target_reference_resource=compute.TargetReference(target='target_value'), + ) + + +def test_set_target_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTargetForwardingRuleRequest, + dict, +]) +def test_set_target_unary_rest(request_type): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request_init["target_reference_resource"] = {'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_target_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_target_unary_rest_required_fields(request_type=compute.SetTargetForwardingRuleRequest): + transport_class = transports.ForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_target_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_target_unary_rest_unset_required_fields(): + transport = transports.ForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.set_target._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "project", "region", "targetReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_target_unary_rest_interceptors(null_interceptor): + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ForwardingRulesRestInterceptor(), + ) + client = ForwardingRulesClient(transport=transport) + with
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "post_set_target") as post, \ + mock.patch.object(transports.ForwardingRulesRestInterceptor, "pre_set_target") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetTargetForwardingRuleRequest.pb(compute.SetTargetForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTargetForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_target_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_target_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetTargetForwardingRuleRequest): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + request_init["target_reference_resource"] = {'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target_unary(request) + + +def test_set_target_unary_rest_flattened(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'forwarding_rule': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + target_reference_resource=compute.TargetReference(target='target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_target_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}/setTarget" % client.transport._host, args[1]) + + +def test_set_target_unary_rest_flattened_error(transport: str = 'rest'): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_target_unary( + compute.SetTargetForwardingRuleRequest(), + project='project_value', + region='region_value', + forwarding_rule='forwarding_rule_value', + target_reference_resource=compute.TargetReference(target='target_value'), + ) + + +def test_set_target_unary_rest_error(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ForwardingRulesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ForwardingRulesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ForwardingRulesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ForwardingRulesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ForwardingRulesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ForwardingRulesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = ForwardingRulesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_forwarding_rules_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ForwardingRulesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_forwarding_rules_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.forwarding_rules.transports.ForwardingRulesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ForwardingRulesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'set_labels', + 'set_target', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_forwarding_rules_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.forwarding_rules.transports.ForwardingRulesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ForwardingRulesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_forwarding_rules_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.forwarding_rules.transports.ForwardingRulesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ForwardingRulesTransport() + adc.assert_called_once() + + +def test_forwarding_rules_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ForwardingRulesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_forwarding_rules_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ForwardingRulesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_forwarding_rules_host_no_port(transport_name): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_forwarding_rules_host_with_port(transport_name): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_forwarding_rules_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 
= ga_credentials.AnonymousCredentials() + client1 = ForwardingRulesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ForwardingRulesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.set_target._session + session2 = client2.transport.set_target._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ForwardingRulesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ForwardingRulesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ForwardingRulesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ForwardingRulesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ForwardingRulesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ForwardingRulesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ForwardingRulesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ForwardingRulesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ForwardingRulesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ForwardingRulesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ForwardingRulesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ForwardingRulesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ForwardingRulesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ForwardingRulesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ForwardingRulesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ForwardingRulesTransport, '_prep_wrapped_messages') as prep: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ForwardingRulesTransport, '_prep_wrapped_messages') as prep: + transport_class = ForwardingRulesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test 
client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ForwardingRulesClient, transports.ForwardingRulesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_addresses.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_addresses.py new file mode 100644 index 000000000..6ae9fa055 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_addresses.py @@ -0,0 +1,3603 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.global_addresses import GlobalAddressesClient +from google.cloud.compute_v1.services.global_addresses import pagers +from google.cloud.compute_v1.services.global_addresses import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GlobalAddressesClient._get_default_mtls_endpoint(None) is None + assert GlobalAddressesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert GlobalAddressesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert GlobalAddressesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert GlobalAddressesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert GlobalAddressesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalAddressesClient, "rest"), +]) +def test_global_addresses_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.GlobalAddressesRestTransport, "rest"), +]) +def test_global_addresses_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalAddressesClient, "rest"), +]) +def test_global_addresses_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_global_addresses_client_get_transport_class(): + transport = GlobalAddressesClient.get_transport_class() + available_transports = [ + transports.GlobalAddressesRestTransport, + ] + assert transport in available_transports + + transport = GlobalAddressesClient.get_transport_class("rest") + assert transport == transports.GlobalAddressesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalAddressesClient, transports.GlobalAddressesRestTransport, "rest"), +]) +@mock.patch.object(GlobalAddressesClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(GlobalAddressesClient)) +def test_global_addresses_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GlobalAddressesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GlobalAddressesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (GlobalAddressesClient, transports.GlobalAddressesRestTransport, "rest", "true"), + (GlobalAddressesClient, transports.GlobalAddressesRestTransport, "rest", "false"), +]) +@mock.patch.object(GlobalAddressesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalAddressesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_global_addresses_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + GlobalAddressesClient +]) +@mock.patch.object(GlobalAddressesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalAddressesClient)) +def test_global_addresses_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalAddressesClient, transports.GlobalAddressesRestTransport, "rest"), +]) +def test_global_addresses_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (GlobalAddressesClient, transports.GlobalAddressesRestTransport, "rest", None), +]) +def test_global_addresses_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalAddressRequest, + dict, +]) +def test_delete_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("address", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalAddressRequest.pb(compute.DeleteGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'address': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + address='address_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses/{address}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteGlobalAddressRequest(), + project='project_value', + address='address_value', + ) + + +def test_delete_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalAddressRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("address", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalAddressRequest.pb(compute.DeleteGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'address': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + address='address_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses/{address}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteGlobalAddressRequest(), + project='project_value', + address='address_value', + ) + + +def test_delete_unary_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetGlobalAddressRequest, + dict, +]) +def test_get_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Address( + address='address_value', + address_type='address_type_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + ip_version='ip_version_value', + ipv6_endpoint_type='ipv6_endpoint_type_value', + kind='kind_value', + label_fingerprint='label_fingerprint_value', + name='name_value', + network='network_value', + network_tier='network_tier_value', + prefix_length=1391, + purpose='purpose_value', + region='region_value', + self_link='self_link_value', + status='status_value', + subnetwork='subnetwork_value', + users=['users_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Address) + assert response.address == 'address_value' + assert response.address_type == 'address_type_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.ip_version == 'ip_version_value' + assert response.ipv6_endpoint_type == 'ipv6_endpoint_type_value' + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.network_tier == 'network_tier_value' + assert response.prefix_length == 1391 + assert response.purpose == 'purpose_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + assert response.subnetwork == 'subnetwork_value' + assert response.users == ['users_value'] + + +def test_get_rest_required_fields(request_type=compute.GetGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Address() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("address", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetGlobalAddressRequest.pb(compute.GetGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Address.to_json(compute.Address()) + + request = compute.GetGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Address() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Address() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'address': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + address='address_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Address.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses/{address}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetGlobalAddressRequest(), + project='project_value', + address='address_value', + ) + + +def test_get_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertGlobalAddressRequest, + dict, +]) +def test_insert_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["address_resource"] = {'address': 'address_value', 'address_type': 'address_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'ip_version': 'ip_version_value', 'ipv6_endpoint_type': 'ipv6_endpoint_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'prefix_length': 1391, 'purpose': 'purpose_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'subnetwork': 'subnetwork_value', 'users': ['users_value1', 'users_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("addressResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertGlobalAddressRequest.pb(compute.InsertGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["address_resource"] = {'address': 'address_value', 'address_type': 'address_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'ip_version': 'ip_version_value', 'ipv6_endpoint_type': 'ipv6_endpoint_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'prefix_length': 1391, 'purpose': 'purpose_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'subnetwork': 'subnetwork_value', 'users': ['users_value1', 'users_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + address_resource=compute.Address(address='address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertGlobalAddressRequest(), + project='project_value', + address_resource=compute.Address(address='address_value'), + ) + + +def test_insert_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertGlobalAddressRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["address_resource"] = {'address': 'address_value', 'address_type': 'address_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'ip_version': 'ip_version_value', 'ipv6_endpoint_type': 'ipv6_endpoint_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'prefix_length': 1391, 'purpose': 'purpose_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'subnetwork': 'subnetwork_value', 'users': ['users_value1', 'users_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("addressResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertGlobalAddressRequest.pb(compute.InsertGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["address_resource"] = {'address': 'address_value', 'address_type': 'address_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'ip_version': 'ip_version_value', 'ipv6_endpoint_type': 'ipv6_endpoint_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'prefix_length': 1391, 'purpose': 'purpose_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'subnetwork': 'subnetwork_value', 'users': ['users_value1', 'users_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + address_resource=compute.Address(address='address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertGlobalAddressRequest(), + project='project_value', + address_resource=compute.Address(address='address_value'), + ) + + +def test_insert_unary_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListGlobalAddressesRequest, + dict, +]) +def test_list_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.AddressList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListGlobalAddressesRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.AddressList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_list") as post, \ 
+ mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListGlobalAddressesRequest.pb(compute.ListGlobalAddressesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.AddressList.to_json(compute.AddressList()) + + request = compute.ListGlobalAddressesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.AddressList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListGlobalAddressesRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.AddressList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.AddressList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListGlobalAddressesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.AddressList( + items=[ + compute.Address(), + compute.Address(), + compute.Address(), + ], + next_page_token='abc', + ), + compute.AddressList( + items=[], + next_page_token='def', + ), + compute.AddressList( + items=[ + compute.Address(), + ], + next_page_token='ghi', + ), + compute.AddressList( + items=[ + compute.Address(), + compute.Address(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.AddressList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Address) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.MoveGlobalAddressRequest, + dict, +]) +def test_move_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request_init["global_addresses_move_request_resource"] = {'description': 'description_value', 'destination_address': 'destination_address_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.move(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_move_rest_required_fields(request_type=compute.MoveGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.move._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("address", "globalAddressesMoveRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_move") as post, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_move") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveGlobalAddressRequest.pb(compute.MoveGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_rest_bad_request(transport: str = 'rest', request_type=compute.MoveGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request_init["global_addresses_move_request_resource"] = {'description': 'description_value', 'destination_address': 'destination_address_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move(request) + + +def test_move_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'address': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + address='address_value', + global_addresses_move_request_resource=compute.GlobalAddressesMoveRequest(description='description_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses/{address}/move" % client.transport._host, args[1]) + + +def test_move_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move( + compute.MoveGlobalAddressRequest(), + project='project_value', + address='address_value', + global_addresses_move_request_resource=compute.GlobalAddressesMoveRequest(description='description_value'), + ) + + +def test_move_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.MoveGlobalAddressRequest, + dict, +]) +def test_move_unary_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request_init["global_addresses_move_request_resource"] = {'description': 'description_value', 'destination_address': 'destination_address_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.move_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_move_unary_rest_required_fields(request_type=compute.MoveGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["address"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["address"] = 'address_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "address" in jsonified_request + assert jsonified_request["address"] == 'address_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_unary_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.move._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("address", "globalAddressesMoveRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_move") as post, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_move") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveGlobalAddressRequest.pb(compute.MoveGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_unary_rest_bad_request(transport: str = 'rest', request_type=compute.MoveGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'address': 'sample2'} + request_init["global_addresses_move_request_resource"] = {'description': 'description_value', 'destination_address': 'destination_address_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_unary(request) + + +def test_move_unary_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'address': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + address='address_value', + global_addresses_move_request_resource=compute.GlobalAddressesMoveRequest(description='description_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses/{address}/move" % client.transport._host, args[1]) + + +def test_move_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.move_unary( + compute.MoveGlobalAddressRequest(), + project='project_value', + address='address_value', + global_addresses_move_request_resource=compute.GlobalAddressesMoveRequest(description='description_value'), + ) + + +def test_move_unary_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsGlobalAddressRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsGlobalAddressRequest.pb(compute.SetLabelsGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsGlobalAddressRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsGlobalAddressRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsGlobalAddressRequest): + transport_class = transports.GlobalAddressesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.GlobalAddressesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalAddressesRestInterceptor(), + ) + client = GlobalAddressesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.GlobalAddressesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetLabelsGlobalAddressRequest.pb(compute.SetLabelsGlobalAddressRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsGlobalAddressRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsGlobalAddressRequest): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/addresses/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsGlobalAddressRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalAddressesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalAddressesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalAddressesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalAddressesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GlobalAddressesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GlobalAddressesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.GlobalAddressesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = GlobalAddressesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_global_addresses_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GlobalAddressesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_global_addresses_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.global_addresses.transports.GlobalAddressesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.GlobalAddressesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'move', + 'set_labels', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_global_addresses_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.global_addresses.transports.GlobalAddressesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalAddressesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_global_addresses_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.global_addresses.transports.GlobalAddressesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalAddressesTransport() + adc.assert_called_once() + + +def test_global_addresses_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GlobalAddressesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_global_addresses_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.GlobalAddressesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_addresses_host_no_port(transport_name): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_addresses_host_with_port(transport_name): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_addresses_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 
= ga_credentials.AnonymousCredentials() + client1 = GlobalAddressesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GlobalAddressesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.move._session + session2 = client2.transport.move._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = GlobalAddressesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = GlobalAddressesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalAddressesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = GlobalAddressesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = GlobalAddressesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalAddressesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = GlobalAddressesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = GlobalAddressesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalAddressesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = GlobalAddressesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = GlobalAddressesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalAddressesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = GlobalAddressesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = GlobalAddressesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalAddressesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.GlobalAddressesTransport, '_prep_wrapped_messages') as prep: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.GlobalAddressesTransport, '_prep_wrapped_messages') as prep: + transport_class = GlobalAddressesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (GlobalAddressesClient, transports.GlobalAddressesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py new file mode 100644 index 000000000..0233c4ae4 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py @@ -0,0 +1,4183 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.global_forwarding_rules import GlobalForwardingRulesClient +from google.cloud.compute_v1.services.global_forwarding_rules import pagers +from google.cloud.compute_v1.services.global_forwarding_rules import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GlobalForwardingRulesClient._get_default_mtls_endpoint(None) is None + assert GlobalForwardingRulesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert GlobalForwardingRulesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert GlobalForwardingRulesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert GlobalForwardingRulesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert GlobalForwardingRulesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalForwardingRulesClient, "rest"), +]) +def test_global_forwarding_rules_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.GlobalForwardingRulesRestTransport, "rest"), +]) +def test_global_forwarding_rules_client_service_account_always_use_jwt(transport_class, 
transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalForwardingRulesClient, "rest"), +]) +def test_global_forwarding_rules_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_global_forwarding_rules_client_get_transport_class(): + transport = GlobalForwardingRulesClient.get_transport_class() + available_transports = [ + transports.GlobalForwardingRulesRestTransport, + ] + assert transport in available_transports + + transport = GlobalForwardingRulesClient.get_transport_class("rest") + assert transport == transports.GlobalForwardingRulesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + 
(GlobalForwardingRulesClient, transports.GlobalForwardingRulesRestTransport, "rest"), +]) +@mock.patch.object(GlobalForwardingRulesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalForwardingRulesClient)) +def test_global_forwarding_rules_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GlobalForwardingRulesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GlobalForwardingRulesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (GlobalForwardingRulesClient, transports.GlobalForwardingRulesRestTransport, "rest", "true"), + (GlobalForwardingRulesClient, transports.GlobalForwardingRulesRestTransport, "rest", "false"), +]) +@mock.patch.object(GlobalForwardingRulesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalForwardingRulesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_global_forwarding_rules_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): 
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + GlobalForwardingRulesClient +]) +@mock.patch.object(GlobalForwardingRulesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalForwardingRulesClient)) +def test_global_forwarding_rules_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalForwardingRulesClient, transports.GlobalForwardingRulesRestTransport, "rest"), +]) +def test_global_forwarding_rules_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (GlobalForwardingRulesClient, transports.GlobalForwardingRulesRestTransport, "rest", None), +]) +def test_global_forwarding_rules_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalForwardingRuleRequest, + dict, +]) +def test_delete_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 
'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(), + ) + client = GlobalForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalForwardingRuleRequest.pb(compute.DeleteGlobalForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteGlobalForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalForwardingRuleRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'forwarding_rule': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule='forwarding_rule_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule='forwarding_rule_value', + ) + + +def test_delete_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalForwardingRuleRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') 
+ req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(), + ) + client = GlobalForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalForwardingRuleRequest.pb(compute.DeleteGlobalForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteGlobalForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalForwardingRuleRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'forwarding_rule': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule='forwarding_rule_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule='forwarding_rule_value', + ) + + +def test_delete_unary_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetGlobalForwardingRuleRequest, + dict, +]) +def test_get_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRule( + I_p_address='I_p_address_value', + I_p_protocol='I_p_protocol_value', + all_ports=True, + allow_global_access=True, + allow_psc_global_access=True, + backend_service='backend_service_value', + base_forwarding_rule='base_forwarding_rule_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + ip_version='ip_version_value', + is_mirroring_collector=True, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + load_balancing_scheme='load_balancing_scheme_value', + name='name_value', + network='network_value', + network_tier='network_tier_value', + no_automate_dns_zone=True, + port_range='port_range_value', + ports=['ports_value'], + psc_connection_id=1793, + psc_connection_status='psc_connection_status_value', + region='region_value', + self_link='self_link_value', + service_label='service_label_value', + service_name='service_name_value', + source_ip_ranges=['source_ip_ranges_value'], + 
subnetwork='subnetwork_value', + target='target_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.ForwardingRule) + assert response.I_p_address == 'I_p_address_value' + assert response.I_p_protocol == 'I_p_protocol_value' + assert response.all_ports is True + assert response.allow_global_access is True + assert response.allow_psc_global_access is True + assert response.backend_service == 'backend_service_value' + assert response.base_forwarding_rule == 'base_forwarding_rule_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.ip_version == 'ip_version_value' + assert response.is_mirroring_collector is True + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.load_balancing_scheme == 'load_balancing_scheme_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.network_tier == 'network_tier_value' + assert response.no_automate_dns_zone is True + assert response.port_range == 'port_range_value' + assert response.ports == ['ports_value'] + assert response.psc_connection_id == 1793 + assert response.psc_connection_status == 'psc_connection_status_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.service_label == 'service_label_value' + assert response.service_name == 'service_name_value' + 
assert response.source_ip_ranges == ['source_ip_ranges_value'] + assert response.subnetwork == 'subnetwork_value' + assert response.target == 'target_value' + + +def test_get_rest_required_fields(request_type=compute.GetGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRule() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("forwardingRule", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(), + ) + client = GlobalForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_get") as post, \ + 
mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetGlobalForwardingRuleRequest.pb(compute.GetGlobalForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ForwardingRule.to_json(compute.ForwardingRule()) + + request = compute.GetGlobalForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ForwardingRule() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetGlobalForwardingRuleRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRule() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'forwarding_rule': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule='forwarding_rule_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule='forwarding_rule_value', + ) + + +def test_get_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertGlobalForwardingRuleRequest, + dict, +]) +def test_insert_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 
'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRuleResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(), + ) + client = GlobalForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertGlobalForwardingRuleRequest.pb(compute.InsertGlobalForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertGlobalForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertGlobalForwardingRuleRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 
'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + + +def test_insert_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertGlobalForwardingRuleRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 
'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRuleResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(), + ) + client = GlobalForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertGlobalForwardingRuleRequest.pb(compute.InsertGlobalForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertGlobalForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertGlobalForwardingRuleRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 
'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + + +def test_insert_unary_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListGlobalForwardingRulesRequest, + dict, +]) +def test_list_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRuleList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListGlobalForwardingRulesRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRuleList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(), + ) + client = GlobalForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListGlobalForwardingRulesRequest.pb(compute.ListGlobalForwardingRulesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ForwardingRuleList.to_json(compute.ForwardingRuleList()) + + request = compute.ListGlobalForwardingRulesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ForwardingRuleList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListGlobalForwardingRulesRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ForwardingRuleList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ForwardingRuleList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListGlobalForwardingRulesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ForwardingRuleList( + items=[ + compute.ForwardingRule(), + compute.ForwardingRule(), + compute.ForwardingRule(), + ], + next_page_token='abc', + ), + compute.ForwardingRuleList( + items=[], + next_page_token='def', + ), + compute.ForwardingRuleList( + items=[ + compute.ForwardingRule(), + ], + next_page_token='ghi', + ), + compute.ForwardingRuleList( + items=[ + compute.ForwardingRule(), + compute.ForwardingRule(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ForwardingRuleList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.ForwardingRule) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchGlobalForwardingRuleRequest, + dict, +]) +def test_patch_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 
'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 
'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "forwardingRuleResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(), + ) + client = GlobalForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchGlobalForwardingRuleRequest.pb(compute.PatchGlobalForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchGlobalForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchGlobalForwardingRuleRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 
'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'forwarding_rule': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule='forwarding_rule_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule='forwarding_rule_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + + +def test_patch_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchGlobalForwardingRuleRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 
'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "forwardingRuleResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(), + ) + client = GlobalForwardingRulesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchGlobalForwardingRuleRequest.pb(compute.PatchGlobalForwardingRuleRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchGlobalForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchGlobalForwardingRuleRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request_init["forwarding_rule_resource"] = {'I_p_address': 'I_p_address_value', 'I_p_protocol': 'I_p_protocol_value', 'all_ports': True, 'allow_global_access': True, 'allow_psc_global_access': True, 'backend_service': 'backend_service_value', 'base_forwarding_rule': 'base_forwarding_rule_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_version': 'ip_version_value', 'is_mirroring_collector': True, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'load_balancing_scheme': 
'load_balancing_scheme_value', 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'name': 'name_value', 'network': 'network_value', 'network_tier': 'network_tier_value', 'no_automate_dns_zone': True, 'port_range': 'port_range_value', 'ports': ['ports_value1', 'ports_value2'], 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service_directory_registrations': [{'namespace': 'namespace_value', 'service': 'service_value', 'service_directory_region': 'service_directory_region_value'}], 'service_label': 'service_label_value', 'service_name': 'service_name_value', 'source_ip_ranges': ['source_ip_ranges_value1', 'source_ip_ranges_value2'], 'subnetwork': 'subnetwork_value', 'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'forwarding_rule': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule='forwarding_rule_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule='forwarding_rule_value', + forwarding_rule_resource=compute.ForwardingRule(I_p_address='I_p_address_value'), + ) + + +def test_patch_unary_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsGlobalForwardingRuleRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_labels(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_labels_rest_unset_required_fields():
+    transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_labels._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_labels_rest_interceptors(null_interceptor):
+    transport = transports.GlobalForwardingRulesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(),
+        )
+    client = GlobalForwardingRulesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_set_labels") as post, \
+         mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_set_labels") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetLabelsGlobalForwardingRuleRequest.pb(compute.SetLabelsGlobalForwardingRuleRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetLabelsGlobalForwardingRuleRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsGlobalForwardingRuleRequest):
+    client = GlobalForwardingRulesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'resource': 'sample2'}
+    request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_labels(request)
+
+
+def test_set_labels_rest_flattened():
+    client = GlobalForwardingRulesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsGlobalForwardingRuleRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsGlobalForwardingRuleRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_labels_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_labels_unary_rest_unset_required_fields():
+    transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_labels._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_labels_unary_rest_interceptors(null_interceptor):
+    transport = transports.GlobalForwardingRulesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(),
+        )
+    client = GlobalForwardingRulesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_set_labels") as post, \
+         mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_set_labels") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetLabelsGlobalForwardingRuleRequest.pb(compute.SetLabelsGlobalForwardingRuleRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetLabelsGlobalForwardingRuleRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsGlobalForwardingRuleRequest):
+    client = GlobalForwardingRulesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'resource': 'sample2'}
+    request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_labels_unary(request)
+
+
+def test_set_labels_unary_rest_flattened():
+    client = GlobalForwardingRulesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsGlobalForwardingRuleRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTargetGlobalForwardingRuleRequest, + dict, +]) +def test_set_target_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request_init["target_reference_resource"] = {'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_target(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_target_rest_required_fields(request_type=compute.SetTargetGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_target(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_target_rest_unset_required_fields():
+    transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_target._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "project", "targetReferenceResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_target_rest_interceptors(null_interceptor):
+    transport = transports.GlobalForwardingRulesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(),
+        )
+    client = GlobalForwardingRulesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_set_target") as post, \
+         mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_set_target") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetTargetGlobalForwardingRuleRequest.pb(compute.SetTargetGlobalForwardingRuleRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+ "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTargetGlobalForwardingRuleRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_target(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_target_rest_bad_request(transport: str = 'rest', request_type=compute.SetTargetGlobalForwardingRuleRequest): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request_init["target_reference_resource"] = {'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target(request) + + +def test_set_target_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'forwarding_rule': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule='forwarding_rule_value', + target_reference_resource=compute.TargetReference(target='target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_target(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}/setTarget" % client.transport._host, args[1]) + + +def test_set_target_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_target( + compute.SetTargetGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule='forwarding_rule_value', + target_reference_resource=compute.TargetReference(target='target_value'), + ) + + +def test_set_target_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTargetGlobalForwardingRuleRequest, + dict, +]) +def test_set_target_unary_rest(request_type): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'} + request_init["target_reference_resource"] = {'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_target_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_target_unary_rest_required_fields(request_type=compute.SetTargetGlobalForwardingRuleRequest): + transport_class = transports.GlobalForwardingRulesRestTransport + + request_init = {} + request_init["forwarding_rule"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["forwardingRule"] = 'forwarding_rule_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "forwardingRule" in jsonified_request + assert jsonified_request["forwardingRule"] == 'forwarding_rule_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_target_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_target_unary_rest_unset_required_fields():
+    transport = transports.GlobalForwardingRulesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_target._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("forwardingRule", "project", "targetReferenceResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_target_unary_rest_interceptors(null_interceptor):
+    transport = transports.GlobalForwardingRulesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.GlobalForwardingRulesRestInterceptor(),
+        )
+    client = GlobalForwardingRulesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "post_set_target") as post, \
+         mock.patch.object(transports.GlobalForwardingRulesRestInterceptor, "pre_set_target") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetTargetGlobalForwardingRuleRequest.pb(compute.SetTargetGlobalForwardingRuleRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetTargetGlobalForwardingRuleRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_target_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_target_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetTargetGlobalForwardingRuleRequest):
+    client = GlobalForwardingRulesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'forwarding_rule': 'sample2'}
+    request_init["target_reference_resource"] = {'target': 'target_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target_unary(request) + + +def test_set_target_unary_rest_flattened(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'forwarding_rule': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + forwarding_rule='forwarding_rule_value', + target_reference_resource=compute.TargetReference(target='target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_target_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}/setTarget" % client.transport._host, args[1]) + + +def test_set_target_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_target_unary( + compute.SetTargetGlobalForwardingRuleRequest(), + project='project_value', + forwarding_rule='forwarding_rule_value', + target_reference_resource=compute.TargetReference(target='target_value'), + ) + + +def test_set_target_unary_rest_error(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalForwardingRulesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalForwardingRulesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalForwardingRulesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalForwardingRulesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GlobalForwardingRulesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GlobalForwardingRulesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.GlobalForwardingRulesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = GlobalForwardingRulesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_global_forwarding_rules_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GlobalForwardingRulesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_global_forwarding_rules_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.global_forwarding_rules.transports.GlobalForwardingRulesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.GlobalForwardingRulesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'set_labels', + 'set_target', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_global_forwarding_rules_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.global_forwarding_rules.transports.GlobalForwardingRulesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalForwardingRulesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_global_forwarding_rules_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.global_forwarding_rules.transports.GlobalForwardingRulesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalForwardingRulesTransport() + adc.assert_called_once() + + +def test_global_forwarding_rules_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        GlobalForwardingRulesClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/cloud-platform',
+            ),
+            quota_project_id=None,
+        )
+
+
+def test_global_forwarding_rules_http_transport_client_cert_source_for_mtls():
+    # The client-cert callback passed at construction must be forwarded to the
+    # underlying AuthorizedSession's mTLS channel configuration.
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.GlobalForwardingRulesRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_global_forwarding_rules_host_no_port(transport_name):
+    client = GlobalForwardingRulesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'compute.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://compute.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_global_forwarding_rules_host_with_port(transport_name):
+    client = GlobalForwardingRulesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'compute.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://compute.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_global_forwarding_rules_client_transport_session_collision(transport_name):
+    # Two clients built with identical configuration must not share an HTTP
+    # session for any method.
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = GlobalForwardingRulesClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = GlobalForwardingRulesClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.delete._session
+    session2 = client2.transport.delete._session
+    assert session1 != session2
+    session1 = client1.transport.get._session
+    session2 = client2.transport.get._session
+    assert session1 != session2
+    session1 = client1.transport.insert._session
+    session2 = client2.transport.insert._session
+    assert session1 != session2
+    session1 = client1.transport.list._session
+    session2 = client2.transport.list._session
+    assert session1 != session2
+    session1 = client1.transport.patch._session
+    session2 = client2.transport.patch._session
+    assert session1 != session2
+    session1 = client1.transport.set_labels._session
+    session2 = client2.transport.set_labels._session
+    assert session1 != session2
+    session1 = client1.transport.set_target._session
+    session2 = client2.transport.set_target._session
+    assert session1 != session2
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = GlobalForwardingRulesClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = GlobalForwardingRulesClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = GlobalForwardingRulesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = GlobalForwardingRulesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = GlobalForwardingRulesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalForwardingRulesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = GlobalForwardingRulesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = GlobalForwardingRulesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalForwardingRulesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = GlobalForwardingRulesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = GlobalForwardingRulesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalForwardingRulesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = GlobalForwardingRulesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = GlobalForwardingRulesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalForwardingRulesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.GlobalForwardingRulesTransport, '_prep_wrapped_messages') as prep: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.GlobalForwardingRulesTransport, '_prep_wrapped_messages') as prep: + transport_class = GlobalForwardingRulesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = GlobalForwardingRulesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (GlobalForwardingRulesClient, transports.GlobalForwardingRulesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py new file mode 100644 index 000000000..eac7bc201 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py @@ -0,0 +1,3900 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.global_network_endpoint_groups import GlobalNetworkEndpointGroupsClient +from google.cloud.compute_v1.services.global_network_endpoint_groups import pagers +from google.cloud.compute_v1.services.global_network_endpoint_groups import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + 
return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GlobalNetworkEndpointGroupsClient._get_default_mtls_endpoint(None) is None + assert GlobalNetworkEndpointGroupsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert GlobalNetworkEndpointGroupsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert GlobalNetworkEndpointGroupsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert GlobalNetworkEndpointGroupsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert GlobalNetworkEndpointGroupsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalNetworkEndpointGroupsClient, "rest"), +]) +def test_global_network_endpoint_groups_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name 
in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.GlobalNetworkEndpointGroupsRestTransport, "rest"), +]) +def test_global_network_endpoint_groups_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalNetworkEndpointGroupsClient, "rest"), +]) +def test_global_network_endpoint_groups_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_global_network_endpoint_groups_client_get_transport_class(): + transport = GlobalNetworkEndpointGroupsClient.get_transport_class() + available_transports = [ + 
transports.GlobalNetworkEndpointGroupsRestTransport, + ] + assert transport in available_transports + + transport = GlobalNetworkEndpointGroupsClient.get_transport_class("rest") + assert transport == transports.GlobalNetworkEndpointGroupsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalNetworkEndpointGroupsClient, transports.GlobalNetworkEndpointGroupsRestTransport, "rest"), +]) +@mock.patch.object(GlobalNetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalNetworkEndpointGroupsClient)) +def test_global_network_endpoint_groups_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GlobalNetworkEndpointGroupsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GlobalNetworkEndpointGroupsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (GlobalNetworkEndpointGroupsClient, transports.GlobalNetworkEndpointGroupsRestTransport, "rest", "true"), + (GlobalNetworkEndpointGroupsClient, transports.GlobalNetworkEndpointGroupsRestTransport, "rest", "false"), +]) +@mock.patch.object(GlobalNetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalNetworkEndpointGroupsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_global_network_endpoint_groups_client_mtls_env_auto(client_class, 
transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + GlobalNetworkEndpointGroupsClient +]) +@mock.patch.object(GlobalNetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalNetworkEndpointGroupsClient)) +def test_global_network_endpoint_groups_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalNetworkEndpointGroupsClient, transports.GlobalNetworkEndpointGroupsRestTransport, "rest"), +]) +def test_global_network_endpoint_groups_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (GlobalNetworkEndpointGroupsClient, transports.GlobalNetworkEndpointGroupsRestTransport, "rest", None), +]) +def test_global_network_endpoint_groups_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_attach_network_endpoints_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request_init["global_network_endpoint_groups_attach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.attach_network_endpoints(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_attach_network_endpoints_rest_required_fields(request_type=compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + transport_class = transports.GlobalNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default 
values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.attach_network_endpoints(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_attach_network_endpoints_rest_unset_required_fields(): + transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.attach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("globalNetworkEndpointGroupsAttachEndpointsRequestResource", "networkEndpointGroup", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_attach_network_endpoints_rest_interceptors(null_interceptor): + transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_attach_network_endpoints") as post, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_attach_network_endpoints") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.pb(compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.attach_network_endpoints(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_attach_network_endpoints_rest_bad_request(transport: str = 'rest', request_type=compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request_init["global_network_endpoint_groups_attach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_network_endpoints(request) + + +def test_attach_network_endpoints_rest_flattened(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_endpoint_group='network_endpoint_group_value', + global_network_endpoint_groups_attach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.attach_network_endpoints(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints" % client.transport._host, args[1]) + + +def test_attach_network_endpoints_rest_flattened_error(transport: str = 'rest'): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.attach_network_endpoints( + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest(), + project='project_value', + network_endpoint_group='network_endpoint_group_value', + global_network_endpoint_groups_attach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + + +def test_attach_network_endpoints_rest_error(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_attach_network_endpoints_unary_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request_init["global_network_endpoint_groups_attach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.attach_network_endpoints_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_attach_network_endpoints_unary_rest_required_fields(request_type=compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + transport_class = transports.GlobalNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.attach_network_endpoints_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_attach_network_endpoints_unary_rest_unset_required_fields(): + transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.attach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("globalNetworkEndpointGroupsAttachEndpointsRequestResource", "networkEndpointGroup", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_attach_network_endpoints_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.GlobalNetworkEndpointGroupsRestInterceptor(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_attach_network_endpoints") as post, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_attach_network_endpoints") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.pb(compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.attach_network_endpoints_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_attach_network_endpoints_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request_init["global_network_endpoint_groups_attach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 
'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_network_endpoints_unary(request) + + +def test_attach_network_endpoints_unary_rest_flattened(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_endpoint_group='network_endpoint_group_value', + global_network_endpoint_groups_attach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.attach_network_endpoints_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints" % client.transport._host, args[1]) + + +def test_attach_network_endpoints_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.attach_network_endpoints_unary( + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest(), + project='project_value', + network_endpoint_group='network_endpoint_group_value', + global_network_endpoint_groups_attach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + + +def test_attach_network_endpoints_unary_rest_error(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_delete_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteGlobalNetworkEndpointGroupRequest): + transport_class = transports.GlobalNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalNetworkEndpointGroupRequest.pb(compute.DeleteGlobalNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteGlobalNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalNetworkEndpointGroupRequest): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_endpoint_group='network_endpoint_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteGlobalNetworkEndpointGroupRequest(), + project='project_value', + network_endpoint_group='network_endpoint_group_value', + ) + + +def test_delete_rest_error(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteGlobalNetworkEndpointGroupRequest): + transport_class = transports.GlobalNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalNetworkEndpointGroupRequest.pb(compute.DeleteGlobalNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteGlobalNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalNetworkEndpointGroupRequest): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_endpoint_group='network_endpoint_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteGlobalNetworkEndpointGroupRequest(), + project='project_value', + network_endpoint_group='network_endpoint_group_value', + ) + + +def test_delete_unary_rest_error(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_detach_network_endpoints_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request_init["global_network_endpoint_groups_detach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.detach_network_endpoints(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_detach_network_endpoints_rest_required_fields(request_type=compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + transport_class = transports.GlobalNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default 
values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.detach_network_endpoints(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_detach_network_endpoints_rest_unset_required_fields(): + transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.detach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("globalNetworkEndpointGroupsDetachEndpointsRequestResource", "networkEndpointGroup", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_network_endpoints_rest_interceptors(null_interceptor): + transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_detach_network_endpoints") as post, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_detach_network_endpoints") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.pb(compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.detach_network_endpoints(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_network_endpoints_rest_bad_request(transport: str = 'rest', request_type=compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request_init["global_network_endpoint_groups_detach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_network_endpoints(request) + + +def test_detach_network_endpoints_rest_flattened(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_endpoint_group='network_endpoint_group_value', + global_network_endpoint_groups_detach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.detach_network_endpoints(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints" % client.transport._host, args[1]) + + +def test_detach_network_endpoints_rest_flattened_error(transport: str = 'rest'): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.detach_network_endpoints( + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest(), + project='project_value', + network_endpoint_group='network_endpoint_group_value', + global_network_endpoint_groups_detach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + + +def test_detach_network_endpoints_rest_error(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_detach_network_endpoints_unary_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request_init["global_network_endpoint_groups_detach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.detach_network_endpoints_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_detach_network_endpoints_unary_rest_required_fields(request_type=compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + transport_class = transports.GlobalNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.detach_network_endpoints_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_detach_network_endpoints_unary_rest_unset_required_fields(): + # Fix: instantiate AnonymousCredentials() (was passing the class object), + # consistent with every other transport construction in this file. + transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.detach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("globalNetworkEndpointGroupsDetachEndpointsRequestResource", "networkEndpointGroup", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_network_endpoints_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.GlobalNetworkEndpointGroupsRestInterceptor(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_detach_network_endpoints") as post, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_detach_network_endpoints") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.pb(compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.detach_network_endpoints_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_network_endpoints_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request_init["global_network_endpoint_groups_detach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 
'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_network_endpoints_unary(request) + + +def test_detach_network_endpoints_unary_rest_flattened(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_endpoint_group='network_endpoint_group_value', + global_network_endpoint_groups_detach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.detach_network_endpoints_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints" % client.transport._host, args[1]) + + +def test_detach_network_endpoints_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.detach_network_endpoints_unary( + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest(), + project='project_value', + network_endpoint_group='network_endpoint_group_value', + global_network_endpoint_groups_detach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + + +def test_detach_network_endpoints_unary_rest_error(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_get_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkEndpointGroup( + creation_timestamp='creation_timestamp_value', + default_port=1289, + description='description_value', + id=205, + kind='kind_value', + name='name_value', + network='network_value', + network_endpoint_type='network_endpoint_type_value', + psc_target_service='psc_target_service_value', + region='region_value', + self_link='self_link_value', + size=443, + subnetwork='subnetwork_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.NetworkEndpointGroup) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.default_port == 1289 + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.network_endpoint_type == 'network_endpoint_type_value' + assert response.psc_target_service == 'psc_target_service_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.size == 443 + assert response.subnetwork == 'subnetwork_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetGlobalNetworkEndpointGroupRequest): + transport_class = transports.GlobalNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = 
json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + # Fix: instantiate AnonymousCredentials() (was passing the class object), + # consistent with every other transport construction in this file. + transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("networkEndpointGroup", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetGlobalNetworkEndpointGroupRequest.pb(compute.GetGlobalNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkEndpointGroup.to_json(compute.NetworkEndpointGroup()) + + request = compute.GetGlobalNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkEndpointGroup() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetGlobalNetworkEndpointGroupRequest): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkEndpointGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_endpoint_group='network_endpoint_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetGlobalNetworkEndpointGroupRequest(), + project='project_value', + network_endpoint_group='network_endpoint_group_value', + ) + + +def test_get_rest_error(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_insert_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertGlobalNetworkEndpointGroupRequest): + transport_class = transports.GlobalNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + # Fix: instantiate AnonymousCredentials() (was passing the class object), + # consistent with every other transport construction in this file. + transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroupResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertGlobalNetworkEndpointGroupRequest.pb(compute.InsertGlobalNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertGlobalNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertGlobalNetworkEndpointGroupRequest): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertGlobalNetworkEndpointGroupRequest(), + project='project_value', + network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}), + ) + + +def test_insert_rest_error(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertGlobalNetworkEndpointGroupRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 
'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_insert_unary_rest_required_fields(request_type=compute.InsertGlobalNetworkEndpointGroupRequest):
    """Verify that the REST transport drops default-valued required fields and
    preserves explicitly set ones when building the insert request."""
    transport_class = transports.GlobalNetworkEndpointGroupsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert_unary(request)

            # No required query parameters remain once defaults are stripped.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_unary_rest_unset_required_fields():
    """Check the computed unset-required-fields set for the insert method."""
    # NOTE(review): AnonymousCredentials is passed as the class, not an
    # instance; the transport constructor tolerates this here — confirm
    # against generator output before changing.
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroupResource", "project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_unary_rest_interceptors(null_interceptor):
    """Ensure pre_insert/post_insert interceptor hooks fire exactly once."""
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(),
    )
    client = GlobalNetworkEndpointGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_insert") as post, \
         mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertGlobalNetworkEndpointGroupRequest.pb(compute.InsertGlobalNetworkEndpointGroupRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertGlobalNetworkEndpointGroupRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertGlobalNetworkEndpointGroupRequest):
    """A 400 from the REST session must surface as core_exceptions.BadRequest."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link':
        'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request)


def test_insert_unary_rest_flattened():
    """Flattened (keyword) arguments must transcode to the expected URL."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups" % client.transport._host, args[1])


def test_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert_unary(
            compute.InsertGlobalNetworkEndpointGroupRequest(),
            project='project_value',
            network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}),
        )


def test_insert_unary_rest_error():
    """Smoke test: client construction over REST succeeds."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.ListGlobalNetworkEndpointGroupsRequest,
    dict,
])
def test_list_rest(request_type):
    """Basic happy-path call of list() over a mocked REST session."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NetworkEndpointGroupList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkEndpointGroupList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'


def test_list_rest_required_fields(request_type=compute.ListGlobalNetworkEndpointGroupsRequest):
    """Verify that the REST transport drops default-valued required fields and
    preserves explicitly set ones when building the list request."""
    transport_class = transports.GlobalNetworkEndpointGroupsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.NetworkEndpointGroupList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.NetworkEndpointGroupList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list(request)

            # No required query parameters remain once defaults are stripped.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_rest_unset_required_fields():
    """Check the computed unset-required-fields set for the list method."""
    # NOTE(review): AnonymousCredentials is passed as the class, not an
    # instance; the transport constructor tolerates this here — confirm
    # against generator output before changing.
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
    """Ensure pre_list/post_list interceptor hooks fire exactly once."""
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(),
    )
    client = GlobalNetworkEndpointGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_list") as post, \
         mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListGlobalNetworkEndpointGroupsRequest.pb(compute.ListGlobalNetworkEndpointGroupsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.NetworkEndpointGroupList.to_json(compute.NetworkEndpointGroupList())

        request = compute.ListGlobalNetworkEndpointGroupsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.NetworkEndpointGroupList()

        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListGlobalNetworkEndpointGroupsRequest):
    """A 400 from the REST session must surface as core_exceptions.BadRequest."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list(request)


def test_list_rest_flattened():
    """Flattened (keyword) arguments must transcode to the expected URL."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NetworkEndpointGroupList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkEndpointGroupList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListGlobalNetworkEndpointGroupsRequest(),
            project='project_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    """Paging: four pages (3+0+1+2 items) are walked and tokens match."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.NetworkEndpointGroupList(
                items=[
                    compute.NetworkEndpointGroup(),
                    compute.NetworkEndpointGroup(),
                    compute.NetworkEndpointGroup(),
                ],
                next_page_token='abc',
            ),
            compute.NetworkEndpointGroupList(
                items=[],
                next_page_token='def',
            ),
            compute.NetworkEndpointGroupList(
                items=[
                    compute.NetworkEndpointGroup(),
                ],
                next_page_token='ghi',
            ),
            compute.NetworkEndpointGroupList(
                items=[
                    compute.NetworkEndpointGroup(),
                    compute.NetworkEndpointGroup(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.NetworkEndpointGroupList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1'}

        pager = client.list(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.NetworkEndpointGroup)
                   for i in results)

        pages = list(client.list(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [
    compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest,
    dict,
])
def test_list_network_endpoints_rest(request_type):
    """Basic happy-path call of list_network_endpoints() over mocked REST."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NetworkEndpointGroupsListNetworkEndpoints(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list_network_endpoints(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListNetworkEndpointsPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'


def test_list_network_endpoints_rest_required_fields(request_type=compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest):
    """Verify that the REST transport drops default-valued required fields and
    preserves explicitly set ones for list_network_endpoints."""
    transport_class = transports.GlobalNetworkEndpointGroupsRestTransport

    request_init = {}
    request_init["network_endpoint_group"] = ""
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_network_endpoints._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value'
    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_network_endpoints._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "networkEndpointGroup" in jsonified_request
    assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.NetworkEndpointGroupsListNetworkEndpoints()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_network_endpoints(request)

            # No required query parameters remain once defaults are stripped.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_network_endpoints_rest_unset_required_fields():
    """Check the computed unset-required-fields set for list_network_endpoints."""
    # NOTE(review): AnonymousCredentials is passed as the class, not an
    # instance; the transport constructor tolerates this here — confirm
    # against generator output before changing.
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list_network_endpoints._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("networkEndpointGroup", "project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_network_endpoints_rest_interceptors(null_interceptor):
    """Ensure pre/post list_network_endpoints interceptor hooks fire once."""
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.GlobalNetworkEndpointGroupsRestInterceptor(),
    )
    client = GlobalNetworkEndpointGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_list_network_endpoints") as post, \
         mock.patch.object(transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_list_network_endpoints") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.pb(compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.NetworkEndpointGroupsListNetworkEndpoints.to_json(compute.NetworkEndpointGroupsListNetworkEndpoints())

        request = compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.NetworkEndpointGroupsListNetworkEndpoints()

        client.list_network_endpoints(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_network_endpoints_rest_bad_request(transport: str = 'rest', request_type=compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest):
    """A 400 from the REST session must surface as core_exceptions.BadRequest."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'network_endpoint_group': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_network_endpoints(request)


def test_list_network_endpoints_rest_flattened():
    """Flattened (keyword) arguments must transcode to the expected URL."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NetworkEndpointGroupsListNetworkEndpoints()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            network_endpoint_group='network_endpoint_group_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list_network_endpoints(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints" % client.transport._host, args[1])


def test_list_network_endpoints_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_network_endpoints(
            compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(),
            project='project_value',
            network_endpoint_group='network_endpoint_group_value',
        )


def test_list_network_endpoints_rest_pager(transport: str = 'rest'):
    """Paging: four pages (3+0+1+2 items) are walked and tokens match."""
    client = GlobalNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.NetworkEndpointGroupsListNetworkEndpoints(
                items=[
                    compute.NetworkEndpointWithHealthStatus(),
                    compute.NetworkEndpointWithHealthStatus(),
                    compute.NetworkEndpointWithHealthStatus(),
                ],
                next_page_token='abc',
            ),
            compute.NetworkEndpointGroupsListNetworkEndpoints(
                items=[],
                next_page_token='def',
            ),
            compute.NetworkEndpointGroupsListNetworkEndpoints(
                items=[
                    compute.NetworkEndpointWithHealthStatus(),
                ],
                next_page_token='ghi',
            ),
            compute.NetworkEndpointGroupsListNetworkEndpoints(
                items=[
                    compute.NetworkEndpointWithHealthStatus(),
                    compute.NetworkEndpointWithHealthStatus(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.NetworkEndpointGroupsListNetworkEndpoints.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1', 'network_endpoint_group': 'sample2'}

        pager = client.list_network_endpoints(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.NetworkEndpointWithHealthStatus)
                   for i in results)

        pages = list(client.list_network_endpoints(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


def test_credentials_transport_error():
    """Every illegal combination of credentials/options with an explicit
    transport instance must raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = GlobalNetworkEndpointGroupsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = GlobalNetworkEndpointGroupsClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = GlobalNetworkEndpointGroupsClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = GlobalNetworkEndpointGroupsClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.GlobalNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = GlobalNetworkEndpointGroupsClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    # A client may be instantiated with a custom transport instance.
+ transport = transports.GlobalNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GlobalNetworkEndpointGroupsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.GlobalNetworkEndpointGroupsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = GlobalNetworkEndpointGroupsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_global_network_endpoint_groups_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GlobalNetworkEndpointGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_global_network_endpoint_groups_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.global_network_endpoint_groups.transports.GlobalNetworkEndpointGroupsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.GlobalNetworkEndpointGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+    methods = (
+        'attach_network_endpoints',
+        'delete',
+        'detach_network_endpoints',
+        'get',
+        'insert',
+        'list',
+        'list_network_endpoints',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch-all: every remaining method/property must also raise NotImplementedError
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_global_network_endpoint_groups_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.global_network_endpoint_groups.transports.GlobalNetworkEndpointGroupsTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.GlobalNetworkEndpointGroupsTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+            'https://www.googleapis.com/auth/compute',
+            'https://www.googleapis.com/auth/cloud-platform',
+),
+            quota_project_id="octopus",
+        )
+
+
+def test_global_network_endpoint_groups_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.global_network_endpoint_groups.transports.GlobalNetworkEndpointGroupsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalNetworkEndpointGroupsTransport() + adc.assert_called_once() + + +def test_global_network_endpoint_groups_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GlobalNetworkEndpointGroupsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_global_network_endpoint_groups_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.GlobalNetworkEndpointGroupsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_network_endpoint_groups_host_no_port(transport_name): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_global_network_endpoint_groups_host_with_port(transport_name): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_network_endpoint_groups_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GlobalNetworkEndpointGroupsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GlobalNetworkEndpointGroupsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.attach_network_endpoints._session + session2 = client2.transport.attach_network_endpoints._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.detach_network_endpoints._session + session2 = client2.transport.detach_network_endpoints._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_network_endpoints._session + session2 = client2.transport.list_network_endpoints._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = 
"billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = GlobalNetworkEndpointGroupsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = GlobalNetworkEndpointGroupsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalNetworkEndpointGroupsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = GlobalNetworkEndpointGroupsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = GlobalNetworkEndpointGroupsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalNetworkEndpointGroupsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = GlobalNetworkEndpointGroupsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = GlobalNetworkEndpointGroupsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalNetworkEndpointGroupsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = GlobalNetworkEndpointGroupsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = GlobalNetworkEndpointGroupsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalNetworkEndpointGroupsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = GlobalNetworkEndpointGroupsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = GlobalNetworkEndpointGroupsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalNetworkEndpointGroupsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.GlobalNetworkEndpointGroupsTransport, '_prep_wrapped_messages') as prep: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.GlobalNetworkEndpointGroupsTransport, '_prep_wrapped_messages') as prep: + transport_class = GlobalNetworkEndpointGroupsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (GlobalNetworkEndpointGroupsClient, transports.GlobalNetworkEndpointGroupsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_operations.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_operations.py new file mode 100644 index 000000000..275bfc7a3 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_operations.py @@ -0,0 +1,2244 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.global_operations import GlobalOperationsClient +from google.cloud.compute_v1.services.global_operations import pagers +from google.cloud.compute_v1.services.global_operations import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GlobalOperationsClient._get_default_mtls_endpoint(None) is None + assert GlobalOperationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert GlobalOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert GlobalOperationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert GlobalOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert GlobalOperationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalOperationsClient, "rest"), +]) +def test_global_operations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.GlobalOperationsRestTransport, "rest"), +]) +def test_global_operations_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalOperationsClient, "rest"), +]) +def test_global_operations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_global_operations_client_get_transport_class(): + transport = GlobalOperationsClient.get_transport_class() + available_transports = [ + transports.GlobalOperationsRestTransport, + ] + assert transport in available_transports + + transport = GlobalOperationsClient.get_transport_class("rest") + assert transport == transports.GlobalOperationsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalOperationsClient, transports.GlobalOperationsRestTransport, "rest"), +]) 
+@mock.patch.object(GlobalOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalOperationsClient))
+def test_global_operations_client_client_options(client_class, transport_class, transport_name):
+    # Check that if a transport instance is provided we won't create a new one.
+    with mock.patch.object(GlobalOperationsClient, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if the transport is provided as a name (str) we will create a new one.
+    with mock.patch.object(GlobalOperationsClient, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (GlobalOperationsClient, transports.GlobalOperationsRestTransport, "rest", "true"),
+    (GlobalOperationsClient, transports.GlobalOperationsRestTransport, "rest", "false"),
+])
+@mock.patch.object(GlobalOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalOperationsClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_global_operations_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + GlobalOperationsClient +]) +@mock.patch.object(GlobalOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalOperationsClient)) +def test_global_operations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalOperationsClient, transports.GlobalOperationsRestTransport, "rest"), +]) +def test_global_operations_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (GlobalOperationsClient, transports.GlobalOperationsRestTransport, "rest", None), +]) +def test_global_operations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListGlobalOperationsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.OperationAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListGlobalOperationsRequest): + transport_class = transports.GlobalOperationsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.OperationAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.OperationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.GlobalOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalOperationsRestInterceptor(), + ) + client = GlobalOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListGlobalOperationsRequest.pb(compute.AggregatedListGlobalOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.OperationAggregatedList.to_json(compute.OperationAggregatedList()) + + request = compute.AggregatedListGlobalOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.OperationAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListGlobalOperationsRequest): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.OperationAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/operations" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListGlobalOperationsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.OperationAggregatedList( + items={ + 'a':compute.OperationsScopedList(), + 'b':compute.OperationsScopedList(), + 'c':compute.OperationsScopedList(), + }, + next_page_token='abc', + ), + compute.OperationAggregatedList( + items={}, + next_page_token='def', + ), + compute.OperationAggregatedList( + items={ + 'g':compute.OperationsScopedList(), + }, + next_page_token='ghi', + ), + compute.OperationAggregatedList( + items={ + 'h':compute.OperationsScopedList(), + 'i':compute.OperationsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.OperationAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.OperationsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.OperationsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.OperationsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalOperationRequest, + dict, +]) +def test_delete_rest(request_type): + client = GlobalOperationsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'operation': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DeleteGlobalOperationResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DeleteGlobalOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.DeleteGlobalOperationResponse) + + +def test_delete_rest_required_fields(request_type=compute.DeleteGlobalOperationRequest): + transport_class = transports.GlobalOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DeleteGlobalOperationResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DeleteGlobalOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.GlobalOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("operation", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalOperationsRestInterceptor(), + ) + client = GlobalOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalOperationRequest.pb(compute.DeleteGlobalOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.DeleteGlobalOperationResponse.to_json(compute.DeleteGlobalOperationResponse()) + + request = compute.DeleteGlobalOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DeleteGlobalOperationResponse() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalOperationRequest): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'operation': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.DeleteGlobalOperationResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'operation': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DeleteGlobalOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/operations/{operation}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteGlobalOperationRequest(), + project='project_value', + operation='operation_value', + ) + + +def test_delete_rest_error(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetGlobalOperationRequest, + dict, +]) +def test_get_rest(request_type): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'operation': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetGlobalOperationRequest): + transport_class = transports.GlobalOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["operation"] = 'operation_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.GlobalOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("operation", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalOperationsRestInterceptor(), + ) + client = GlobalOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetGlobalOperationRequest.pb(compute.GetGlobalOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.GetGlobalOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetGlobalOperationRequest): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'operation': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'operation': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/operations/{operation}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetGlobalOperationRequest(), + project='project_value', + operation='operation_value', + ) + + +def test_get_rest_error(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListGlobalOperationsRequest, + dict, +]) +def test_list_rest(request_type): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.OperationList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListGlobalOperationsRequest): + transport_class = transports.GlobalOperationsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.OperationList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.GlobalOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalOperationsRestInterceptor(), + ) + client = GlobalOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "post_list") as 
post, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListGlobalOperationsRequest.pb(compute.ListGlobalOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.OperationList.to_json(compute.OperationList()) + + request = compute.ListGlobalOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.OperationList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListGlobalOperationsRequest): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.OperationList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/operations" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListGlobalOperationsRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.OperationList( + items=[ + compute.Operation(), + compute.Operation(), + compute.Operation(), + ], + next_page_token='abc', + ), + compute.OperationList( + items=[], + next_page_token='def', + ), + compute.OperationList( + items=[ + compute.Operation(), + ], + next_page_token='ghi', + ), + compute.OperationList( + items=[ + compute.Operation(), + compute.Operation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.OperationList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Operation) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.WaitGlobalOperationRequest, + dict, +]) +def test_wait_rest(request_type): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'operation': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.wait(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_wait_rest_required_fields(request_type=compute.WaitGlobalOperationRequest): + transport_class = transports.GlobalOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).wait._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + jsonified_request["project"] = 
'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).wait._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.wait(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_wait_rest_unset_required_fields(): + transport = transports.GlobalOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.wait._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("operation", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_wait_rest_interceptors(null_interceptor): + transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalOperationsRestInterceptor(), + ) + client = GlobalOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "post_wait") as post, \ + mock.patch.object(transports.GlobalOperationsRestInterceptor, "pre_wait") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.WaitGlobalOperationRequest.pb(compute.WaitGlobalOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.WaitGlobalOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.wait(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_wait_rest_bad_request(transport: str = 'rest', request_type=compute.WaitGlobalOperationRequest): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'operation': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait(request) + + +def test_wait_rest_flattened(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'operation': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.wait(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/operations/{operation}/wait" % client.transport._host, args[1]) + + +def test_wait_rest_flattened_error(transport: str = 'rest'): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.wait( + compute.WaitGlobalOperationRequest(), + project='project_value', + operation='operation_value', + ) + + +def test_wait_rest_error(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalOperationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalOperationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalOperationsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalOperationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.GlobalOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GlobalOperationsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.GlobalOperationsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = GlobalOperationsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_global_operations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GlobalOperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_global_operations_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.global_operations.transports.GlobalOperationsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.GlobalOperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'list', + 'wait', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_global_operations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.global_operations.transports.GlobalOperationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalOperationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_global_operations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.global_operations.transports.GlobalOperationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalOperationsTransport() + adc.assert_called_once() + + +def test_global_operations_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GlobalOperationsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_global_operations_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.GlobalOperationsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_operations_host_no_port(transport_name): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_operations_host_with_port(transport_name): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_operations_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() 
+ creds2 = ga_credentials.AnonymousCredentials() + client1 = GlobalOperationsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GlobalOperationsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.wait._session + session2 = client2.transport.wait._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = GlobalOperationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = GlobalOperationsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalOperationsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = GlobalOperationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = GlobalOperationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalOperationsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = GlobalOperationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = GlobalOperationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalOperationsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = GlobalOperationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = GlobalOperationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalOperationsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = GlobalOperationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = GlobalOperationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalOperationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.GlobalOperationsTransport, '_prep_wrapped_messages') as prep: + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.GlobalOperationsTransport, '_prep_wrapped_messages') as prep: + transport_class = GlobalOperationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (GlobalOperationsClient, transports.GlobalOperationsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_organization_operations.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_organization_operations.py new file mode 100644 index 000000000..7e784b0f8 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_organization_operations.py @@ -0,0 +1,1510 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.global_organization_operations import GlobalOrganizationOperationsClient +from google.cloud.compute_v1.services.global_organization_operations import pagers +from google.cloud.compute_v1.services.global_organization_operations import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GlobalOrganizationOperationsClient._get_default_mtls_endpoint(None) is None + assert GlobalOrganizationOperationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert GlobalOrganizationOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert GlobalOrganizationOperationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert GlobalOrganizationOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert GlobalOrganizationOperationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalOrganizationOperationsClient, "rest"), +]) +def test_global_organization_operations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.GlobalOrganizationOperationsRestTransport, "rest"), +]) +def 
test_global_organization_operations_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalOrganizationOperationsClient, "rest"), +]) +def test_global_organization_operations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_global_organization_operations_client_get_transport_class(): + transport = GlobalOrganizationOperationsClient.get_transport_class() + available_transports = [ + transports.GlobalOrganizationOperationsRestTransport, + ] + assert transport in available_transports + + transport = GlobalOrganizationOperationsClient.get_transport_class("rest") + assert transport == 
transports.GlobalOrganizationOperationsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalOrganizationOperationsClient, transports.GlobalOrganizationOperationsRestTransport, "rest"), +]) +@mock.patch.object(GlobalOrganizationOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalOrganizationOperationsClient)) +def test_global_organization_operations_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GlobalOrganizationOperationsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GlobalOrganizationOperationsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (GlobalOrganizationOperationsClient, transports.GlobalOrganizationOperationsRestTransport, "rest", "true"), + (GlobalOrganizationOperationsClient, transports.GlobalOrganizationOperationsRestTransport, "rest", "false"), +]) +@mock.patch.object(GlobalOrganizationOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalOrganizationOperationsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_global_organization_operations_client_mtls_env_auto(client_class, 
transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + GlobalOrganizationOperationsClient +]) +@mock.patch.object(GlobalOrganizationOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalOrganizationOperationsClient)) +def test_global_organization_operations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalOrganizationOperationsClient, transports.GlobalOrganizationOperationsRestTransport, "rest"), +]) +def test_global_organization_operations_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (GlobalOrganizationOperationsClient, transports.GlobalOrganizationOperationsRestTransport, "rest", None), +]) +def test_global_organization_operations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalOrganizationOperationRequest, + dict, +]) +def test_delete_rest(request_type): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'operation': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DeleteGlobalOrganizationOperationResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DeleteGlobalOrganizationOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.DeleteGlobalOrganizationOperationResponse) + + +def test_delete_rest_required_fields(request_type=compute.DeleteGlobalOrganizationOperationRequest): + transport_class = transports.GlobalOrganizationOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DeleteGlobalOrganizationOperationResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DeleteGlobalOrganizationOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.GlobalOrganizationOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parentId", )) & set(("operation", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.GlobalOrganizationOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalOrganizationOperationsRestInterceptor(), + ) + client = GlobalOrganizationOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalOrganizationOperationsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalOrganizationOperationsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DeleteGlobalOrganizationOperationRequest.pb(compute.DeleteGlobalOrganizationOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.DeleteGlobalOrganizationOperationResponse.to_json(compute.DeleteGlobalOrganizationOperationResponse()) + + request = compute.DeleteGlobalOrganizationOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DeleteGlobalOrganizationOperationResponse() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalOrganizationOperationRequest): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'operation': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DeleteGlobalOrganizationOperationResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'operation': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DeleteGlobalOrganizationOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/operations/{operation}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteGlobalOrganizationOperationRequest(), + operation='operation_value', + ) + + +def test_delete_rest_error(): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetGlobalOrganizationOperationRequest, + dict, +]) +def test_get_rest(request_type): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'operation': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetGlobalOrganizationOperationRequest): + transport_class = transports.GlobalOrganizationOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["operation"] = 'operation_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.GlobalOrganizationOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(("parentId", )) & set(("operation", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.GlobalOrganizationOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalOrganizationOperationsRestInterceptor(), + ) + client = GlobalOrganizationOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalOrganizationOperationsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.GlobalOrganizationOperationsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetGlobalOrganizationOperationRequest.pb(compute.GetGlobalOrganizationOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.GetGlobalOrganizationOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetGlobalOrganizationOperationRequest): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'operation': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'operation': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/locations/global/operations/{operation}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetGlobalOrganizationOperationRequest(), + operation='operation_value', + ) + + +def test_get_rest_error(): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListGlobalOrganizationOperationsRequest, + dict, +]) +def test_list_rest(request_type): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.OperationList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.GlobalOrganizationOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalOrganizationOperationsRestInterceptor(), + ) + client = GlobalOrganizationOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalOrganizationOperationsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.GlobalOrganizationOperationsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListGlobalOrganizationOperationsRequest.pb(compute.ListGlobalOrganizationOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.OperationList.to_json(compute.OperationList()) + + request = compute.ListGlobalOrganizationOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.OperationList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListGlobalOrganizationOperationsRequest): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_pager(transport: str = 'rest'): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.OperationList( + items=[ + compute.Operation(), + compute.Operation(), + compute.Operation(), + ], + next_page_token='abc', + ), + compute.OperationList( + items=[], + next_page_token='def', + ), + compute.OperationList( + items=[ + compute.Operation(), + ], + next_page_token='ghi', + ), + compute.OperationList( + items=[ + compute.Operation(), + compute.Operation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.OperationList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Operation) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GlobalOrganizationOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.GlobalOrganizationOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalOrganizationOperationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GlobalOrganizationOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalOrganizationOperationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalOrganizationOperationsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GlobalOrganizationOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalOrganizationOperationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GlobalOrganizationOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GlobalOrganizationOperationsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.GlobalOrganizationOperationsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = GlobalOrganizationOperationsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_global_organization_operations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GlobalOrganizationOperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_global_organization_operations_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.global_organization_operations.transports.GlobalOrganizationOperationsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.GlobalOrganizationOperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_global_organization_operations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.global_organization_operations.transports.GlobalOrganizationOperationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalOrganizationOperationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_global_organization_operations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.global_organization_operations.transports.GlobalOrganizationOperationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalOrganizationOperationsTransport() + adc.assert_called_once() + + +def test_global_organization_operations_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GlobalOrganizationOperationsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_global_organization_operations_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.GlobalOrganizationOperationsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_organization_operations_host_no_port(transport_name): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_organization_operations_host_with_port(transport_name): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_global_organization_operations_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GlobalOrganizationOperationsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GlobalOrganizationOperationsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = GlobalOrganizationOperationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = GlobalOrganizationOperationsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalOrganizationOperationsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = GlobalOrganizationOperationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = GlobalOrganizationOperationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalOrganizationOperationsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = GlobalOrganizationOperationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = GlobalOrganizationOperationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalOrganizationOperationsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = GlobalOrganizationOperationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = GlobalOrganizationOperationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalOrganizationOperationsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = GlobalOrganizationOperationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = GlobalOrganizationOperationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalOrganizationOperationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.GlobalOrganizationOperationsTransport, '_prep_wrapped_messages') as prep: + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.GlobalOrganizationOperationsTransport, '_prep_wrapped_messages') as prep: + transport_class = GlobalOrganizationOperationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (GlobalOrganizationOperationsClient, transports.GlobalOrganizationOperationsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py new file mode 100644 index 000000000..6a5ac6fa6 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py @@ -0,0 +1,3037 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.global_public_delegated_prefixes import GlobalPublicDelegatedPrefixesClient +from google.cloud.compute_v1.services.global_public_delegated_prefixes import pagers +from google.cloud.compute_v1.services.global_public_delegated_prefixes import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GlobalPublicDelegatedPrefixesClient._get_default_mtls_endpoint(None) is None + assert GlobalPublicDelegatedPrefixesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert GlobalPublicDelegatedPrefixesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert GlobalPublicDelegatedPrefixesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert GlobalPublicDelegatedPrefixesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert GlobalPublicDelegatedPrefixesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalPublicDelegatedPrefixesClient, "rest"), +]) +def test_global_public_delegated_prefixes_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.GlobalPublicDelegatedPrefixesRestTransport, "rest"), +]) +def 
test_global_public_delegated_prefixes_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (GlobalPublicDelegatedPrefixesClient, "rest"), +]) +def test_global_public_delegated_prefixes_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_global_public_delegated_prefixes_client_get_transport_class(): + transport = GlobalPublicDelegatedPrefixesClient.get_transport_class() + available_transports = [ + transports.GlobalPublicDelegatedPrefixesRestTransport, + ] + assert transport in available_transports + + transport = GlobalPublicDelegatedPrefixesClient.get_transport_class("rest") + assert transport == 
transports.GlobalPublicDelegatedPrefixesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalPublicDelegatedPrefixesClient, transports.GlobalPublicDelegatedPrefixesRestTransport, "rest"), +]) +@mock.patch.object(GlobalPublicDelegatedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalPublicDelegatedPrefixesClient)) +def test_global_public_delegated_prefixes_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GlobalPublicDelegatedPrefixesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GlobalPublicDelegatedPrefixesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (GlobalPublicDelegatedPrefixesClient, transports.GlobalPublicDelegatedPrefixesRestTransport, "rest", "true"), + (GlobalPublicDelegatedPrefixesClient, transports.GlobalPublicDelegatedPrefixesRestTransport, "rest", "false"), +]) +@mock.patch.object(GlobalPublicDelegatedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalPublicDelegatedPrefixesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def 
test_global_public_delegated_prefixes_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + GlobalPublicDelegatedPrefixesClient +]) +@mock.patch.object(GlobalPublicDelegatedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GlobalPublicDelegatedPrefixesClient)) +def test_global_public_delegated_prefixes_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (GlobalPublicDelegatedPrefixesClient, transports.GlobalPublicDelegatedPrefixesRestTransport, "rest"), +]) +def test_global_public_delegated_prefixes_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (GlobalPublicDelegatedPrefixesClient, transports.GlobalPublicDelegatedPrefixesRestTransport, "rest", None), +]) +def test_global_public_delegated_prefixes_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalPublicDelegatedPrefixeRequest, + dict, +]) +def test_delete_rest(request_type): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteGlobalPublicDelegatedPrefixeRequest): + transport_class = transports.GlobalPublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefix", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalPublicDelegatedPrefixesRestInterceptor(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalPublicDelegatedPrefixeRequest.pb(compute.DeleteGlobalPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteGlobalPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalPublicDelegatedPrefixeRequest): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteGlobalPublicDelegatedPrefixeRequest(), + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + + +def test_delete_rest_error(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteGlobalPublicDelegatedPrefixeRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteGlobalPublicDelegatedPrefixeRequest): + transport_class = transports.GlobalPublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefix", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalPublicDelegatedPrefixesRestInterceptor(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteGlobalPublicDelegatedPrefixeRequest.pb(compute.DeleteGlobalPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteGlobalPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteGlobalPublicDelegatedPrefixeRequest): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteGlobalPublicDelegatedPrefixeRequest(), + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + + +def test_delete_unary_rest_error(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetGlobalPublicDelegatedPrefixeRequest, + dict, +]) +def test_get_rest(request_type): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefix( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + ip_cidr_range='ip_cidr_range_value', + is_live_migration=True, + kind='kind_value', + name='name_value', + parent_prefix='parent_prefix_value', + region='region_value', + self_link='self_link_value', + status='status_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.PublicDelegatedPrefix) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.ip_cidr_range == 'ip_cidr_range_value' + assert response.is_live_migration is True + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.parent_prefix == 'parent_prefix_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + + +def test_get_rest_required_fields(request_type=compute.GetGlobalPublicDelegatedPrefixeRequest): + transport_class = transports.GlobalPublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify 
fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PublicDelegatedPrefix() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "publicDelegatedPrefix", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalPublicDelegatedPrefixesRestInterceptor(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetGlobalPublicDelegatedPrefixeRequest.pb(compute.GetGlobalPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicDelegatedPrefix.to_json(compute.PublicDelegatedPrefix()) + + request = compute.GetGlobalPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicDelegatedPrefix() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetGlobalPublicDelegatedPrefixeRequest): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefix() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetGlobalPublicDelegatedPrefixeRequest(), + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + + +def test_get_rest_error(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertGlobalPublicDelegatedPrefixeRequest, + dict, +]) +def test_insert_rest(request_type): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertGlobalPublicDelegatedPrefixeRequest): + transport_class = transports.GlobalPublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefixResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalPublicDelegatedPrefixesRestInterceptor(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertGlobalPublicDelegatedPrefixeRequest.pb(compute.InsertGlobalPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertGlobalPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertGlobalPublicDelegatedPrefixeRequest): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertGlobalPublicDelegatedPrefixeRequest(), + project='project_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertGlobalPublicDelegatedPrefixeRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertGlobalPublicDelegatedPrefixeRequest): + transport_class = transports.GlobalPublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefixResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalPublicDelegatedPrefixesRestInterceptor(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_insert") as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertGlobalPublicDelegatedPrefixeRequest.pb(compute.InsertGlobalPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertGlobalPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertGlobalPublicDelegatedPrefixeRequest): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertGlobalPublicDelegatedPrefixeRequest(), + project='project_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListGlobalPublicDelegatedPrefixesRequest, + dict, +]) +def test_list_rest(request_type): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefixList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListGlobalPublicDelegatedPrefixesRequest): + transport_class = transports.GlobalPublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PublicDelegatedPrefixList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalPublicDelegatedPrefixesRestInterceptor(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListGlobalPublicDelegatedPrefixesRequest.pb(compute.ListGlobalPublicDelegatedPrefixesRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicDelegatedPrefixList.to_json(compute.PublicDelegatedPrefixList()) + + request = compute.ListGlobalPublicDelegatedPrefixesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicDelegatedPrefixList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListGlobalPublicDelegatedPrefixesRequest): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefixList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListGlobalPublicDelegatedPrefixesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + ], + next_page_token='abc', + ), + compute.PublicDelegatedPrefixList( + items=[], + next_page_token='def', + ), + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + ], + next_page_token='ghi', + ), + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.PublicDelegatedPrefixList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.PublicDelegatedPrefix) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchGlobalPublicDelegatedPrefixeRequest, + dict, +]) +def test_patch_rest(request_type): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 
'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchGlobalPublicDelegatedPrefixeRequest): + transport_class = transports.GlobalPublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefix", "publicDelegatedPrefixResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalPublicDelegatedPrefixesRestInterceptor(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchGlobalPublicDelegatedPrefixeRequest.pb(compute.PatchGlobalPublicDelegatedPrefixeRequest()) + transcode.return_value = { 
+ "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchGlobalPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchGlobalPublicDelegatedPrefixeRequest): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchGlobalPublicDelegatedPrefixeRequest(), + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchGlobalPublicDelegatedPrefixeRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 
'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchGlobalPublicDelegatedPrefixeRequest): + transport_class = transports.GlobalPublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefix", "publicDelegatedPrefixResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.GlobalPublicDelegatedPrefixesRestInterceptor(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchGlobalPublicDelegatedPrefixeRequest.pb(compute.PatchGlobalPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchGlobalPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchGlobalPublicDelegatedPrefixeRequest): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 
'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_delegated_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchGlobalPublicDelegatedPrefixeRequest(), + project='project_value', + public_delegated_prefix='public_delegated_prefix_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalPublicDelegatedPrefixesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalPublicDelegatedPrefixesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GlobalPublicDelegatedPrefixesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GlobalPublicDelegatedPrefixesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GlobalPublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GlobalPublicDelegatedPrefixesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.GlobalPublicDelegatedPrefixesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = GlobalPublicDelegatedPrefixesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_global_public_delegated_prefixes_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GlobalPublicDelegatedPrefixesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_global_public_delegated_prefixes_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.global_public_delegated_prefixes.transports.GlobalPublicDelegatedPrefixesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.GlobalPublicDelegatedPrefixesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_global_public_delegated_prefixes_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.global_public_delegated_prefixes.transports.GlobalPublicDelegatedPrefixesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalPublicDelegatedPrefixesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_global_public_delegated_prefixes_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.global_public_delegated_prefixes.transports.GlobalPublicDelegatedPrefixesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GlobalPublicDelegatedPrefixesTransport() + adc.assert_called_once() + + +def test_global_public_delegated_prefixes_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GlobalPublicDelegatedPrefixesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_global_public_delegated_prefixes_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.GlobalPublicDelegatedPrefixesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_public_delegated_prefixes_host_no_port(transport_name): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) 
+def test_global_public_delegated_prefixes_host_with_port(transport_name): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_global_public_delegated_prefixes_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GlobalPublicDelegatedPrefixesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GlobalPublicDelegatedPrefixesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = GlobalPublicDelegatedPrefixesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = GlobalPublicDelegatedPrefixesClient.common_billing_account_path(**expected) 
+ + # Check that the path construction is reversible. + actual = GlobalPublicDelegatedPrefixesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = GlobalPublicDelegatedPrefixesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = GlobalPublicDelegatedPrefixesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalPublicDelegatedPrefixesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = GlobalPublicDelegatedPrefixesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = GlobalPublicDelegatedPrefixesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalPublicDelegatedPrefixesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = GlobalPublicDelegatedPrefixesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = GlobalPublicDelegatedPrefixesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GlobalPublicDelegatedPrefixesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = GlobalPublicDelegatedPrefixesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = GlobalPublicDelegatedPrefixesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = GlobalPublicDelegatedPrefixesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.GlobalPublicDelegatedPrefixesTransport, '_prep_wrapped_messages') as prep: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.GlobalPublicDelegatedPrefixesTransport, '_prep_wrapped_messages') as prep: + transport_class = GlobalPublicDelegatedPrefixesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + 
client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (GlobalPublicDelegatedPrefixesClient, transports.GlobalPublicDelegatedPrefixesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_health_checks.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_health_checks.py new file mode 100644 index 000000000..487b5d7e5 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_health_checks.py @@ -0,0 +1,3904 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.health_checks import HealthChecksClient +from google.cloud.compute_v1.services.health_checks import pagers +from google.cloud.compute_v1.services.health_checks import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is 
localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert HealthChecksClient._get_default_mtls_endpoint(None) is None + assert HealthChecksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert HealthChecksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert HealthChecksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert HealthChecksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert HealthChecksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (HealthChecksClient, "rest"), +]) +def test_health_checks_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.HealthChecksRestTransport, 
"rest"), +]) +def test_health_checks_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (HealthChecksClient, "rest"), +]) +def test_health_checks_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_health_checks_client_get_transport_class(): + transport = HealthChecksClient.get_transport_class() + available_transports = [ + transports.HealthChecksRestTransport, + ] + assert transport in available_transports + + transport = HealthChecksClient.get_transport_class("rest") + assert transport == transports.HealthChecksRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", 
[ + (HealthChecksClient, transports.HealthChecksRestTransport, "rest"), +]) +@mock.patch.object(HealthChecksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(HealthChecksClient)) +def test_health_checks_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(HealthChecksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(HealthChecksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (HealthChecksClient, transports.HealthChecksRestTransport, "rest", "true"), + (HealthChecksClient, transports.HealthChecksRestTransport, "rest", "false"), +]) +@mock.patch.object(HealthChecksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(HealthChecksClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_health_checks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + HealthChecksClient +]) +@mock.patch.object(HealthChecksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(HealthChecksClient)) +def test_health_checks_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (HealthChecksClient, transports.HealthChecksRestTransport, "rest"), +]) +def test_health_checks_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (HealthChecksClient, transports.HealthChecksRestTransport, "rest", None), +]) +def test_health_checks_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListHealthChecksRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthChecksAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthChecksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListHealthChecksRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.HealthChecksAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.HealthChecksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(), + ) + client = HealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListHealthChecksRequest.pb(compute.AggregatedListHealthChecksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.HealthChecksAggregatedList.to_json(compute.HealthChecksAggregatedList()) + + request = compute.AggregatedListHealthChecksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.HealthChecksAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListHealthChecksRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.HealthChecksAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthChecksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/healthChecks" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListHealthChecksRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.HealthChecksAggregatedList( + items={ + 'a':compute.HealthChecksScopedList(), + 'b':compute.HealthChecksScopedList(), + 'c':compute.HealthChecksScopedList(), + }, + next_page_token='abc', + ), + compute.HealthChecksAggregatedList( + items={}, + next_page_token='def', + ), + compute.HealthChecksAggregatedList( + items={ + 'g':compute.HealthChecksScopedList(), + }, + next_page_token='ghi', + ), + compute.HealthChecksAggregatedList( + items={ + 'h':compute.HealthChecksScopedList(), + 'i':compute.HealthChecksScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.HealthChecksAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.HealthChecksScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.HealthChecksScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.HealthChecksScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteHealthCheckRequest, + dict, +]) +def test_delete_rest(request_type): + client = 
HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(), + ) + client = HealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteHealthCheckRequest.pb(compute.DeleteHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'health_check': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check='health_check_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteHealthCheckRequest(), + project='project_value', + health_check='health_check_value', + ) + + +def test_delete_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteHealthCheckRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + 
response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(), + ) + client = HealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.HealthChecksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteHealthCheckRequest.pb(compute.DeleteHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'health_check': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check='health_check_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteHealthCheckRequest(), + project='project_value', + health_check='health_check_value', + ) + + +def test_delete_unary_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetHealthCheckRequest, + dict, +]) +def test_get_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheck( + check_interval_sec=1884, + creation_timestamp='creation_timestamp_value', + description='description_value', + healthy_threshold=1819, + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + timeout_sec=1185, + type_='type__value', + unhealthy_threshold=2046, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.HealthCheck) + assert response.check_interval_sec == 1884 + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.healthy_threshold == 1819 + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.timeout_sec == 1185 + assert response.type_ == 'type__value' + assert response.unhealthy_threshold == 2046 + + +def test_get_rest_required_fields(request_type=compute.GetHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheck() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("healthCheck", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(), + ) + client = HealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetHealthCheckRequest.pb(compute.GetHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.HealthCheck.to_json(compute.HealthCheck()) + + request = compute.GetHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.HealthCheck() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheck() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'health_check': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check='health_check_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetHealthCheckRequest(), + project='project_value', + health_check='health_check_value', + ) + + +def test_get_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertHealthCheckRequest, + dict, +]) +def test_insert_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 
'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(), + ) + client = HealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertHealthCheckRequest.pb(compute.InsertHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 
'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertHealthCheckRequest(), + project='project_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_insert_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertHealthCheckRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 
'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(), + ) + client = HealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertHealthCheckRequest.pb(compute.InsertHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 
'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertHealthCheckRequest(), + project='project_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_insert_unary_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListHealthChecksRequest, + dict, +]) +def test_list_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheckList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListHealthChecksRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheckList() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.HealthCheckList.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_rest_unset_required_fields():
+    # AnonymousCredentials must be instantiated: passing the class object
+    # would hand the transport a type instead of a credentials value.
+    transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_rest_interceptors(null_interceptor):
+    transport = transports.HealthChecksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(),
+    )
+    client = HealthChecksClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.HealthChecksRestInterceptor, "post_list") as post, \
+
mock.patch.object(transports.HealthChecksRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListHealthChecksRequest.pb(compute.ListHealthChecksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.HealthCheckList.to_json(compute.HealthCheckList()) + + request = compute.ListHealthChecksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.HealthCheckList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListHealthChecksRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.HealthCheckList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListHealthChecksRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.HealthCheckList( + items=[ + compute.HealthCheck(), + compute.HealthCheck(), + compute.HealthCheck(), + ], + next_page_token='abc', + ), + compute.HealthCheckList( + items=[], + next_page_token='def', + ), + compute.HealthCheckList( + items=[ + compute.HealthCheck(), + ], + next_page_token='ghi', + ), + compute.HealthCheckList( + items=[ + compute.HealthCheck(), + compute.HealthCheck(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.HealthCheckList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.HealthCheck) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchHealthCheckRequest, + dict, +]) +def test_patch_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 
'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.patch(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_patch_rest_unset_required_fields():
+    # AnonymousCredentials must be instantiated: passing the class object
+    # would hand the transport a type instead of a credentials value.
+    transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.patch._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "healthCheckResource", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_patch_rest_interceptors(null_interceptor):
+    transport = transports.HealthChecksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(),
+    )
+    client = HealthChecksClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.HealthChecksRestInterceptor, "post_patch") as post, \
+        mock.patch.object(transports.HealthChecksRestInterceptor, "pre_patch") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.PatchHealthCheckRequest.pb(compute.PatchHealthCheckRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 
'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'health_check': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchHealthCheckRequest(), + project='project_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_patch_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchHealthCheckRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 
'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.patch_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_patch_unary_rest_unset_required_fields():
+    # AnonymousCredentials must be instantiated: passing the class object
+    # would hand the transport a type instead of a credentials value.
+    transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.patch._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "healthCheckResource", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_patch_unary_rest_interceptors(null_interceptor):
+    transport = transports.HealthChecksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(),
+    )
+    client = HealthChecksClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+
mock.patch.object(transports.HealthChecksRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchHealthCheckRequest.pb(compute.PatchHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 
'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'health_check': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchHealthCheckRequest(), + project='project_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_patch_unary_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateHealthCheckRequest, + dict, +]) +def test_update_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': 
{'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "healthCheckResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(), + ) + client = HealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateHealthCheckRequest.pb(compute.UpdateHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 
'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'health_check': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateHealthCheckRequest(), + project='project_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_update_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateHealthCheckRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 
453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateHealthCheckRequest): + transport_class = transports.HealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.HealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "healthCheckResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.HealthChecksRestInterceptor(), + ) + client = HealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.HealthChecksRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.HealthChecksRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateHealthCheckRequest.pb(compute.UpdateHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateHealthCheckRequest): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'health_check': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 
'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'health_check': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_unary( + compute.UpdateHealthCheckRequest(), + project='project_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_update_unary_rest_error(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HealthChecksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HealthChecksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HealthChecksClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HealthChecksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.HealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = HealthChecksClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.HealthChecksRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = HealthChecksClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_health_checks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.HealthChecksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_health_checks_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.health_checks.transports.HealthChecksTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.HealthChecksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_health_checks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.health_checks.transports.HealthChecksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HealthChecksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_health_checks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.health_checks.transports.HealthChecksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HealthChecksTransport() + adc.assert_called_once() + + +def test_health_checks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + HealthChecksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_health_checks_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.HealthChecksRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_health_checks_host_no_port(transport_name): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_health_checks_host_with_port(transport_name): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_health_checks_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = HealthChecksClient( + credentials=creds1, + transport=transport_name, + ) + client2 = HealthChecksClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = HealthChecksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = HealthChecksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = HealthChecksClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = HealthChecksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = HealthChecksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = HealthChecksClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = HealthChecksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = HealthChecksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = HealthChecksClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = HealthChecksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = HealthChecksClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = HealthChecksClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = HealthChecksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = HealthChecksClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = HealthChecksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.HealthChecksTransport, '_prep_wrapped_messages') as prep: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.HealthChecksTransport, '_prep_wrapped_messages') as prep: + transport_class = HealthChecksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (HealthChecksClient, transports.HealthChecksRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_image_family_views.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_image_family_views.py new file mode 100644 index 000000000..8cb611d41 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_image_family_views.py @@ -0,0 +1,1073 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.image_family_views import ImageFamilyViewsClient +from google.cloud.compute_v1.services.image_family_views import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ImageFamilyViewsClient._get_default_mtls_endpoint(None) is None + assert ImageFamilyViewsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ImageFamilyViewsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ImageFamilyViewsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ImageFamilyViewsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ImageFamilyViewsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ImageFamilyViewsClient, "rest"), +]) +def test_image_family_views_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ImageFamilyViewsRestTransport, "rest"), +]) +def test_image_family_views_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ImageFamilyViewsClient, "rest"), +]) +def test_image_family_views_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_image_family_views_client_get_transport_class(): + transport = ImageFamilyViewsClient.get_transport_class() + available_transports = [ + transports.ImageFamilyViewsRestTransport, + ] + assert transport in available_transports + + transport = ImageFamilyViewsClient.get_transport_class("rest") + assert transport == transports.ImageFamilyViewsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ImageFamilyViewsClient, transports.ImageFamilyViewsRestTransport, "rest"), +]) 
+@mock.patch.object(ImageFamilyViewsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ImageFamilyViewsClient)) +def test_image_family_views_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ImageFamilyViewsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ImageFamilyViewsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ImageFamilyViewsClient, transports.ImageFamilyViewsRestTransport, "rest", "true"), + (ImageFamilyViewsClient, transports.ImageFamilyViewsRestTransport, "rest", "false"), +]) +@mock.patch.object(ImageFamilyViewsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ImageFamilyViewsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_image_family_views_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ImageFamilyViewsClient +]) +@mock.patch.object(ImageFamilyViewsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ImageFamilyViewsClient)) +def test_image_family_views_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ImageFamilyViewsClient, transports.ImageFamilyViewsRestTransport, "rest"), +]) +def test_image_family_views_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ImageFamilyViewsClient, transports.ImageFamilyViewsRestTransport, "rest", None), +]) +def test_image_family_views_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetImageFamilyViewRequest, + dict, +]) +def test_get_rest(request_type): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'family': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ImageFamilyView( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ImageFamilyView.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.ImageFamilyView) + + +def test_get_rest_required_fields(request_type=compute.GetImageFamilyViewRequest): + transport_class = transports.ImageFamilyViewsRestTransport + + request_init = {} + request_init["family"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["family"] = 'family_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "family" in jsonified_request + assert jsonified_request["family"] == 'family_value' + assert "project" in jsonified_request + assert 
jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ImageFamilyView() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ImageFamilyView.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ImageFamilyViewsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("family", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ImageFamilyViewsRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImageFamilyViewsRestInterceptor(), + ) + client = ImageFamilyViewsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImageFamilyViewsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.ImageFamilyViewsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetImageFamilyViewRequest.pb(compute.GetImageFamilyViewRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ImageFamilyView.to_json(compute.ImageFamilyView()) + + request = compute.GetImageFamilyViewRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ImageFamilyView() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetImageFamilyViewRequest): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'family': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ImageFamilyView() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'family': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + family='family_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ImageFamilyView.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/imageFamilyViews/{family}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetImageFamilyViewRequest(), + project='project_value', + zone='zone_value', + family='family_value', + ) + + +def test_get_rest_error(): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ImageFamilyViewsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ImageFamilyViewsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ImageFamilyViewsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ImageFamilyViewsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ImageFamilyViewsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ImageFamilyViewsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.ImageFamilyViewsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ImageFamilyViewsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ImageFamilyViewsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ImageFamilyViewsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ImageFamilyViewsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = ImageFamilyViewsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_image_family_views_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ImageFamilyViewsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_image_family_views_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.image_family_views.transports.ImageFamilyViewsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ImageFamilyViewsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'get', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_image_family_views_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.image_family_views.transports.ImageFamilyViewsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ImageFamilyViewsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_image_family_views_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.image_family_views.transports.ImageFamilyViewsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ImageFamilyViewsTransport() + adc.assert_called_once() + + +def test_image_family_views_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ImageFamilyViewsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_image_family_views_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ImageFamilyViewsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_image_family_views_host_no_port(transport_name): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_image_family_views_host_with_port(transport_name): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_image_family_views_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ImageFamilyViewsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ImageFamilyViewsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ImageFamilyViewsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ImageFamilyViewsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ImageFamilyViewsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ImageFamilyViewsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ImageFamilyViewsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ImageFamilyViewsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ImageFamilyViewsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ImageFamilyViewsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ImageFamilyViewsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ImageFamilyViewsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ImageFamilyViewsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ImageFamilyViewsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ImageFamilyViewsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ImageFamilyViewsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ImageFamilyViewsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ImageFamilyViewsTransport, '_prep_wrapped_messages') as prep: + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ImageFamilyViewsTransport, '_prep_wrapped_messages') as prep: + transport_class = ImageFamilyViewsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ImageFamilyViewsClient, transports.ImageFamilyViewsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_images.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_images.py new file mode 100644 index 000000000..46e2e03f3 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_images.py @@ -0,0 +1,5199 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.images import ImagesClient +from google.cloud.compute_v1.services.images import pagers +from google.cloud.compute_v1.services.images import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ImagesClient._get_default_mtls_endpoint(None) is None + assert ImagesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ImagesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ImagesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ImagesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ImagesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ImagesClient, "rest"), +]) +def test_images_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ImagesRestTransport, "rest"), +]) +def test_images_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ImagesClient, "rest"), +]) +def test_images_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_images_client_get_transport_class(): + transport = ImagesClient.get_transport_class() + available_transports = [ + transports.ImagesRestTransport, + ] + assert transport in available_transports + + transport = ImagesClient.get_transport_class("rest") + assert transport == transports.ImagesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ImagesClient, transports.ImagesRestTransport, "rest"), +]) +@mock.patch.object(ImagesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ImagesClient)) +def test_images_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is 
provided we won't create a new one. + with mock.patch.object(ImagesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ImagesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ImagesClient, transports.ImagesRestTransport, "rest", "true"), + (ImagesClient, transports.ImagesRestTransport, "rest", "false"), +]) +@mock.patch.object(ImagesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ImagesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_images_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ImagesClient +]) +@mock.patch.object(ImagesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ImagesClient)) +def test_images_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ImagesClient, transports.ImagesRestTransport, "rest"), +]) +def test_images_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ImagesClient, transports.ImagesRestTransport, "rest", None), +]) +def test_images_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteImageRequest, + dict, +]) +def test_delete_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["image"] = 'image_value' + jsonified_request["project"] = 
'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "image" in jsonified_request + assert jsonified_request["image"] == 'image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("image", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteImageRequest.pb(compute.DeleteImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image='image_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{image}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete( + compute.DeleteImageRequest(), + project='project_value', + image='image_value', + ) + + +def test_delete_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteImageRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["image"] = 'image_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "image" in jsonified_request + assert jsonified_request["image"] == 'image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("image", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, 
"pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteImageRequest.pb(compute.DeleteImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image='image_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{image}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteImageRequest(), + project='project_value', + image='image_value', + ) + + +def test_delete_unary_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeprecateImageRequest, + dict, +]) +def test_deprecate_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request_init["deprecation_status_resource"] = {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.deprecate(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_deprecate_rest_required_fields(request_type=compute.DeprecateImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).deprecate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["image"] = 'image_value' + jsonified_request["project"] = 
'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).deprecate._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "image" in jsonified_request + assert jsonified_request["image"] == 'image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.deprecate(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_deprecate_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.deprecate._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("deprecationStatusResource", "image", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deprecate_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_deprecate") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_deprecate") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeprecateImageRequest.pb(compute.DeprecateImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeprecateImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.deprecate(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deprecate_rest_bad_request(transport: str = 'rest', request_type=compute.DeprecateImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request_init["deprecation_status_resource"] = {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deprecate(request) + + +def test_deprecate_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image='image_value', + deprecation_status_resource=compute.DeprecationStatus(deleted='deleted_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.deprecate(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{image}/deprecate" % client.transport._host, args[1]) + + +def test_deprecate_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.deprecate( + compute.DeprecateImageRequest(), + project='project_value', + image='image_value', + deprecation_status_resource=compute.DeprecationStatus(deleted='deleted_value'), + ) + + +def test_deprecate_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeprecateImageRequest, + dict, +]) +def test_deprecate_unary_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request_init["deprecation_status_resource"] = {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.deprecate_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_deprecate_unary_rest_required_fields(request_type=compute.DeprecateImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).deprecate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["image"] = 'image_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).deprecate._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "image" in jsonified_request + assert jsonified_request["image"] == 'image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.deprecate_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_deprecate_unary_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.deprecate._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("deprecationStatusResource", "image", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deprecate_unary_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, 
"post_deprecate") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_deprecate") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeprecateImageRequest.pb(compute.DeprecateImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeprecateImageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.deprecate_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deprecate_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeprecateImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request_init["deprecation_status_resource"] = {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deprecate_unary(request) + + +def test_deprecate_unary_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image='image_value', + deprecation_status_resource=compute.DeprecationStatus(deleted='deleted_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.deprecate_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{image}/deprecate" % client.transport._host, args[1]) + + +def test_deprecate_unary_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.deprecate_unary( + compute.DeprecateImageRequest(), + project='project_value', + image='image_value', + deprecation_status_resource=compute.DeprecationStatus(deleted='deleted_value'), + ) + + +def test_deprecate_unary_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetImageRequest, + dict, +]) +def test_get_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Image( + architecture='architecture_value', + archive_size_bytes=1922, + creation_timestamp='creation_timestamp_value', + description='description_value', + disk_size_gb=1261, + family='family_value', + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + license_codes=[1360], + licenses=['licenses_value'], + name='name_value', + satisfies_pzs=True, + self_link='self_link_value', + source_disk='source_disk_value', + source_disk_id='source_disk_id_value', + source_image='source_image_value', + source_image_id='source_image_id_value', + source_snapshot='source_snapshot_value', + source_snapshot_id='source_snapshot_id_value', + source_type='source_type_value', + status='status_value', + storage_locations=['storage_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Image) + assert response.architecture == 'architecture_value' + assert response.archive_size_bytes == 1922 + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.disk_size_gb == 1261 + assert response.family == 'family_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.license_codes == [1360] + assert response.licenses == ['licenses_value'] + assert response.name == 'name_value' + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.source_disk == 'source_disk_value' + assert response.source_disk_id == 'source_disk_id_value' + assert response.source_image == 'source_image_value' + assert response.source_image_id == 'source_image_id_value' + assert response.source_snapshot == 'source_snapshot_value' + assert response.source_snapshot_id == 'source_snapshot_id_value' + assert response.source_type == 'source_type_value' + assert response.status == 'status_value' + assert response.storage_locations == ['storage_locations_value'] + + +def test_get_rest_required_fields(request_type=compute.GetImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["image"] = 'image_value' 
+ jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "image" in jsonified_request + assert jsonified_request["image"] == 'image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Image() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("image", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetImageRequest.pb(compute.GetImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Image.to_json(compute.Image()) + + request = 
compute.GetImageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Image() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Image() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image='image_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{image}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetImageRequest(), + project='project_value', + image='image_value', + ) + + +def test_get_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetFromFamilyImageRequest, + dict, +]) +def test_get_from_family_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'family': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Image( + architecture='architecture_value', + archive_size_bytes=1922, + creation_timestamp='creation_timestamp_value', + description='description_value', + disk_size_gb=1261, + family='family_value', + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + license_codes=[1360], + licenses=['licenses_value'], + name='name_value', + satisfies_pzs=True, + self_link='self_link_value', + source_disk='source_disk_value', + source_disk_id='source_disk_id_value', + source_image='source_image_value', + source_image_id='source_image_id_value', + source_snapshot='source_snapshot_value', + source_snapshot_id='source_snapshot_id_value', + source_type='source_type_value', + status='status_value', + storage_locations=['storage_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_from_family(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Image) + assert response.architecture == 'architecture_value' + assert response.archive_size_bytes == 1922 + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.disk_size_gb == 1261 + assert response.family == 'family_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.license_codes == [1360] + assert response.licenses == ['licenses_value'] + assert response.name == 'name_value' + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.source_disk == 'source_disk_value' + assert response.source_disk_id == 'source_disk_id_value' + assert response.source_image == 'source_image_value' + assert response.source_image_id == 'source_image_id_value' + assert response.source_snapshot == 'source_snapshot_value' + assert response.source_snapshot_id == 'source_snapshot_id_value' + assert response.source_type == 'source_type_value' + assert response.status == 'status_value' + assert response.storage_locations == ['storage_locations_value'] + + +def test_get_from_family_rest_required_fields(request_type=compute.GetFromFamilyImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["family"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_from_family._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["family"] = 'family_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_from_family._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "family" in jsonified_request + assert jsonified_request["family"] == 'family_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Image() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_from_family(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_from_family_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_from_family._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("family", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_from_family_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_get_from_family") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_get_from_family") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetFromFamilyImageRequest.pb(compute.GetFromFamilyImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Image.to_json(compute.Image()) + + request = compute.GetFromFamilyImageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Image() + + client.get_from_family(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_from_family_rest_bad_request(transport: str = 'rest', request_type=compute.GetFromFamilyImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'family': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_from_family(request) + + +def test_get_from_family_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Image() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'family': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + family='family_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Image.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_from_family(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/family/{family}" % client.transport._host, args[1]) + + +def test_get_from_family_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_from_family( + compute.GetFromFamilyImageRequest(), + project='project_value', + family='family_value', + ) + + +def test_get_from_family_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyImageRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_get_iam_policy") as post, \ + 
mock.patch.object(transports.ImagesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyImageRequest.pb(compute.GetIamPolicyImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyImageRequest(), + project='project_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertImageRequest, + dict, +]) +def test_insert_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["image_resource"] = {'architecture': 'architecture_value', 'archive_size_bytes': 1922, 'creation_timestamp': 'creation_timestamp_value', 'deprecated': {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'}, 'description': 'description_value', 'disk_size_gb': 1261, 'family': 'family_value', 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'image_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'name': 'name_value', 'raw_disk': {'container_type': 'container_type_value', 'sha1_checksum': 'sha1_checksum_value', 'source': 'source_value'}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': 
{}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_type': 'source_type_value', 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force_create", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("forceCreate", "requestId", )) & set(("imageResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertImageRequest.pb(compute.InsertImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["image_resource"] = {'architecture': 'architecture_value', 'archive_size_bytes': 1922, 'creation_timestamp': 'creation_timestamp_value', 'deprecated': {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'}, 'description': 'description_value', 'disk_size_gb': 1261, 'family': 'family_value', 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'image_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'name': 'name_value', 'raw_disk': {'container_type': 'container_type_value', 'sha1_checksum': 'sha1_checksum_value', 'source': 'source_value'}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 
'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_type': 'source_type_value', 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image_resource=compute.Image(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertImageRequest(), + project='project_value', + image_resource=compute.Image(architecture='architecture_value'), + ) + + +def test_insert_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertImageRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["image_resource"] = {'architecture': 'architecture_value', 'archive_size_bytes': 1922, 'creation_timestamp': 'creation_timestamp_value', 'deprecated': {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'}, 'description': 'description_value', 'disk_size_gb': 1261, 'family': 'family_value', 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'image_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'name': 'name_value', 'raw_disk': {'container_type': 'container_type_value', 'sha1_checksum': 'sha1_checksum_value', 'source': 'source_value'}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 
'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_type': 'source_type_value', 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force_create", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("forceCreate", "requestId", )) & set(("imageResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertImageRequest.pb(compute.InsertImageRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["image_resource"] = {'architecture': 'architecture_value', 'archive_size_bytes': 1922, 'creation_timestamp': 'creation_timestamp_value', 'deprecated': {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'}, 'description': 'description_value', 'disk_size_gb': 1261, 'family': 'family_value', 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'image_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'name': 'name_value', 'raw_disk': {'container_type': 'container_type_value', 'sha1_checksum': 'sha1_checksum_value', 'source': 'source_value'}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 
'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_type': 'source_type_value', 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image_resource=compute.Image(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertImageRequest(), + project='project_value', + image_resource=compute.Image(architecture='architecture_value'), + ) + + +def test_insert_unary_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListImagesRequest, + dict, +]) +def test_list_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ImageList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ImageList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListImagesRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ImageList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ImageList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_list") as post, \ + 
mock.patch.object(transports.ImagesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListImagesRequest.pb(compute.ListImagesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ImageList.to_json(compute.ImageList()) + + request = compute.ListImagesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ImageList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListImagesRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ImageList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ImageList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListImagesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ImageList( + items=[ + compute.Image(), + compute.Image(), + compute.Image(), + ], + next_page_token='abc', + ), + compute.ImageList( + items=[], + next_page_token='def', + ), + compute.ImageList( + items=[ + compute.Image(), + ], + next_page_token='ghi', + ), + compute.ImageList( + items=[ + compute.Image(), + compute.Image(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ImageList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Image) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchImageRequest, + dict, +]) +def test_patch_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request_init["image_resource"] = {'architecture': 'architecture_value', 'archive_size_bytes': 1922, 'creation_timestamp': 'creation_timestamp_value', 'deprecated': {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'}, 'description': 'description_value', 'disk_size_gb': 1261, 'family': 
'family_value', 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'image_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'name': 'name_value', 'raw_disk': {'container_type': 'container_type_value', 'sha1_checksum': 'sha1_checksum_value', 'source': 'source_value'}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_type': 'source_type_value', 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["image"] = 'image_value' + jsonified_request["project"] = 
'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "image" in jsonified_request + assert jsonified_request["image"] == 'image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("image", "imageResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchImageRequest.pb(compute.PatchImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request_init["image_resource"] = {'architecture': 'architecture_value', 'archive_size_bytes': 1922, 'creation_timestamp': 'creation_timestamp_value', 'deprecated': {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'}, 'description': 'description_value', 'disk_size_gb': 1261, 'family': 'family_value', 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'image_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'name': 'name_value', 'raw_disk': {'container_type': 'container_type_value', 'sha1_checksum': 'sha1_checksum_value', 'source': 'source_value'}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 
'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_type': 'source_type_value', 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image='image_value', + image_resource=compute.Image(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{image}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchImageRequest(), + project='project_value', + image='image_value', + image_resource=compute.Image(architecture='architecture_value'), + ) + + +def test_patch_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchImageRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request_init["image_resource"] = {'architecture': 'architecture_value', 'archive_size_bytes': 1922, 'creation_timestamp': 'creation_timestamp_value', 'deprecated': {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'}, 'description': 'description_value', 'disk_size_gb': 1261, 'family': 'family_value', 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'image_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'name': 'name_value', 'raw_disk': {'container_type': 'container_type_value', 'sha1_checksum': 'sha1_checksum_value', 'source': 'source_value'}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 
'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_type': 'source_type_value', 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["image"] = 'image_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "image" in jsonified_request + assert jsonified_request["image"] == 'image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("image", "imageResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_patch") as post, \ + 
mock.patch.object(transports.ImagesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchImageRequest.pb(compute.PatchImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'image': 'sample2'} + request_init["image_resource"] = {'architecture': 'architecture_value', 'archive_size_bytes': 1922, 'creation_timestamp': 'creation_timestamp_value', 'deprecated': {'deleted': 'deleted_value', 'deprecated': 'deprecated_value', 'obsolete': 'obsolete_value', 'replacement': 'replacement_value', 'state': 'state_value'}, 'description': 'description_value', 'disk_size_gb': 1261, 'family': 'family_value', 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'image_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': 
['licenses_value1', 'licenses_value2'], 'name': 'name_value', 'raw_disk': {'container_type': 'container_type_value', 'sha1_checksum': 'sha1_checksum_value', 'source': 'source_value'}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_type': 'source_type_value', 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + image='image_value', + image_resource=compute.Image(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{image}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchImageRequest(), + project='project_value', + image='image_value', + image_resource=compute.Image(architecture='architecture_value'), + ) + + +def test_patch_unary_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyImageRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': 
['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetPolicyRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyImageRequest.pb(compute.SetIamPolicyImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 
'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyImageRequest(), + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsImageRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsImageRequest.pb(compute.SetLabelsImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsImageRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsImageRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ImagesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsImageRequest.pb(compute.SetLabelsImageRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsImageRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsImageRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsImageRequest): + transport_class = transports.ImagesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.ImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ImagesRestInterceptor(), + ) + client = ImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ImagesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.ImagesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsImageRequest.pb(compute.TestIamPermissionsImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsImageRequest): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/images/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsImageRequest(), + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ImagesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ImagesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ImagesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ImagesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ImagesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ImagesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = ImagesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_images_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ImagesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_images_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.images.transports.ImagesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ImagesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'deprecate', + 'get', + 'get_from_family', + 'get_iam_policy', + 'insert', + 'list', + 'patch', + 'set_iam_policy', + 'set_labels', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_images_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.images.transports.ImagesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ImagesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_images_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.images.transports.ImagesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ImagesTransport() + adc.assert_called_once() + + +def test_images_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ImagesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_images_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ImagesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_images_host_no_port(transport_name): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_images_host_with_port(transport_name): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_images_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ImagesClient( + 
credentials=creds1, + transport=transport_name, + ) + client2 = ImagesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.deprecate._session + session2 = client2.transport.deprecate._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_from_family._session + session2 = client2.transport.get_from_family._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ImagesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ImagesClient.common_billing_account_path(**expected) + + 
# Check that the path construction is reversible. + actual = ImagesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ImagesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ImagesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ImagesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ImagesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ImagesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ImagesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ImagesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ImagesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ImagesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ImagesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ImagesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ImagesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ImagesTransport, '_prep_wrapped_messages') as prep: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ImagesTransport, '_prep_wrapped_messages') as prep: + transport_class = ImagesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ImagesClient, transports.ImagesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_group_managers.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_group_managers.py new file mode 100644 index 000000000..4073903e0 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_group_managers.py @@ -0,0 +1,10573 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.instance_group_managers import InstanceGroupManagersClient +from google.cloud.compute_v1.services.instance_group_managers import pagers +from google.cloud.compute_v1.services.instance_group_managers import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InstanceGroupManagersClient._get_default_mtls_endpoint(None) is None + assert InstanceGroupManagersClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InstanceGroupManagersClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InstanceGroupManagersClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InstanceGroupManagersClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InstanceGroupManagersClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstanceGroupManagersClient, "rest"), +]) +def test_instance_group_managers_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.InstanceGroupManagersRestTransport, "rest"), +]) +def test_instance_group_managers_client_service_account_always_use_jwt(transport_class, 
transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstanceGroupManagersClient, "rest"), +]) +def test_instance_group_managers_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_instance_group_managers_client_get_transport_class(): + transport = InstanceGroupManagersClient.get_transport_class() + available_transports = [ + transports.InstanceGroupManagersRestTransport, + ] + assert transport in available_transports + + transport = InstanceGroupManagersClient.get_transport_class("rest") + assert transport == transports.InstanceGroupManagersRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + 
(InstanceGroupManagersClient, transports.InstanceGroupManagersRestTransport, "rest"), +]) +@mock.patch.object(InstanceGroupManagersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceGroupManagersClient)) +def test_instance_group_managers_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InstanceGroupManagersClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InstanceGroupManagersClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (InstanceGroupManagersClient, transports.InstanceGroupManagersRestTransport, "rest", "true"), + (InstanceGroupManagersClient, transports.InstanceGroupManagersRestTransport, "rest", "false"), +]) +@mock.patch.object(InstanceGroupManagersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceGroupManagersClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instance_group_managers_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): 
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InstanceGroupManagersClient +]) +@mock.patch.object(InstanceGroupManagersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceGroupManagersClient)) +def test_instance_group_managers_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstanceGroupManagersClient, transports.InstanceGroupManagersRestTransport, "rest"), +]) +def test_instance_group_managers_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InstanceGroupManagersClient, transports.InstanceGroupManagersRestTransport, "rest", None), +]) +def test_instance_group_managers_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AbandonInstancesInstanceGroupManagerRequest, + dict, +]) +def test_abandon_instances_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_abandon_instances_request_resource"] = {'instances': 
['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.abandon_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_abandon_instances_rest_required_fields(request_type=compute.AbandonInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).abandon_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default 
values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).abandon_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.abandon_instances(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_abandon_instances_rest_unset_required_fields():
+    transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.abandon_instances._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersAbandonInstancesRequestResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_abandon_instances_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(),
+        )
+    client = InstanceGroupManagersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_abandon_instances") as post, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_abandon_instances") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.AbandonInstancesInstanceGroupManagerRequest.pb(compute.AbandonInstancesInstanceGroupManagerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.AbandonInstancesInstanceGroupManagerRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.abandon_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_abandon_instances_rest_bad_request(transport: str = 'rest', request_type=compute.AbandonInstancesInstanceGroupManagerRequest):
+    client = InstanceGroupManagersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'}
+    request_init["instance_group_managers_abandon_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.abandon_instances(request) + + +def test_abandon_instances_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_abandon_instances_request_resource=compute.InstanceGroupManagersAbandonInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.abandon_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/abandonInstances" % client.transport._host, args[1]) + + +def test_abandon_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.abandon_instances( + compute.AbandonInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_abandon_instances_request_resource=compute.InstanceGroupManagersAbandonInstancesRequest(instances=['instances_value']), + ) + + +def test_abandon_instances_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AbandonInstancesInstanceGroupManagerRequest, + dict, +]) +def test_abandon_instances_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_abandon_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.abandon_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_abandon_instances_unary_rest_required_fields(request_type=compute.AbandonInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).abandon_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).abandon_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.abandon_instances_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_abandon_instances_unary_rest_unset_required_fields():
+    transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.abandon_instances._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersAbandonInstancesRequestResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_abandon_instances_unary_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(),
+        )
+    client = InstanceGroupManagersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_abandon_instances") as post, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_abandon_instances") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.AbandonInstancesInstanceGroupManagerRequest.pb(compute.AbandonInstancesInstanceGroupManagerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.AbandonInstancesInstanceGroupManagerRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.abandon_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_abandon_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AbandonInstancesInstanceGroupManagerRequest):
+    client = InstanceGroupManagersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'}
+    request_init["instance_group_managers_abandon_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.abandon_instances_unary(request) + + +def test_abandon_instances_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_abandon_instances_request_resource=compute.InstanceGroupManagersAbandonInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.abandon_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/abandonInstances" % client.transport._host, args[1]) + + +def test_abandon_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.abandon_instances_unary( + compute.AbandonInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_abandon_instances_request_resource=compute.InstanceGroupManagersAbandonInstancesRequest(instances=['instances_value']), + ) + + +def test_abandon_instances_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListInstanceGroupManagersRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManagerAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListInstanceGroupManagersRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroupManagerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListInstanceGroupManagersRequest.pb(compute.AggregatedListInstanceGroupManagersRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupManagerAggregatedList.to_json(compute.InstanceGroupManagerAggregatedList()) + + request = compute.AggregatedListInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManagerAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListInstanceGroupManagersRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManagerAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagerAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/instanceGroupManagers" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListInstanceGroupManagersRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupManagerAggregatedList( + items={ + 'a':compute.InstanceGroupManagersScopedList(), + 'b':compute.InstanceGroupManagersScopedList(), + 'c':compute.InstanceGroupManagersScopedList(), + }, + next_page_token='abc', + ), + compute.InstanceGroupManagerAggregatedList( + items={}, + next_page_token='def', + ), + compute.InstanceGroupManagerAggregatedList( + items={ + 'g':compute.InstanceGroupManagersScopedList(), + }, + next_page_token='ghi', + ), + compute.InstanceGroupManagerAggregatedList( + items={ + 'h':compute.InstanceGroupManagersScopedList(), + 'i':compute.InstanceGroupManagersScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceGroupManagerAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.InstanceGroupManagersScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.InstanceGroupManagersScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.InstanceGroupManagersScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize("request_type", [ + compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, + dict, +]) +def test_apply_updates_to_instances_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_apply_updates_request_resource"] = {'all_instances': True, 'instances': ['instances_value1', 'instances_value2'], 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') 
+ req.return_value = response_value + response = client.apply_updates_to_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_apply_updates_to_instances_rest_required_fields(request_type=compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).apply_updates_to_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).apply_updates_to_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.apply_updates_to_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_apply_updates_to_instances_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.apply_updates_to_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "instanceGroupManagersApplyUpdatesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_apply_updates_to_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_apply_updates_to_instances") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_apply_updates_to_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest.pb(compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.apply_updates_to_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_apply_updates_to_instances_rest_bad_request(transport: str = 'rest', request_type=compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_apply_updates_request_resource"] = {'all_instances': True, 'instances': ['instances_value1', 'instances_value2'], 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_updates_to_instances(request) + + +def test_apply_updates_to_instances_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_apply_updates_request_resource=compute.InstanceGroupManagersApplyUpdatesRequest(all_instances=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.apply_updates_to_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances" % client.transport._host, args[1]) + + +def test_apply_updates_to_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.apply_updates_to_instances( + compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_apply_updates_request_resource=compute.InstanceGroupManagersApplyUpdatesRequest(all_instances=True), + ) + + +def test_apply_updates_to_instances_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, + dict, +]) +def test_apply_updates_to_instances_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_apply_updates_request_resource"] = {'all_instances': True, 'instances': ['instances_value1', 'instances_value2'], 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.apply_updates_to_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_apply_updates_to_instances_unary_rest_required_fields(request_type=compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).apply_updates_to_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).apply_updates_to_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.apply_updates_to_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_apply_updates_to_instances_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.apply_updates_to_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "instanceGroupManagersApplyUpdatesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_apply_updates_to_instances_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + 
client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_apply_updates_to_instances") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_apply_updates_to_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest.pb(compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.apply_updates_to_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_apply_updates_to_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_apply_updates_request_resource"] = {'all_instances': True, 'instances': ['instances_value1', 'instances_value2'], 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 
'most_disruptive_allowed_action_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_updates_to_instances_unary(request) + + +def test_apply_updates_to_instances_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_apply_updates_request_resource=compute.InstanceGroupManagersApplyUpdatesRequest(all_instances=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.apply_updates_to_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances" % client.transport._host, args[1]) + + +def test_apply_updates_to_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.apply_updates_to_instances_unary( + compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_apply_updates_request_resource=compute.InstanceGroupManagersApplyUpdatesRequest(all_instances=True), + ) + + +def test_apply_updates_to_instances_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CreateInstancesInstanceGroupManagerRequest, + dict, +]) +def test_create_instances_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_create_instances_request_resource"] = {'instances': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_create_instances_rest_required_fields(request_type=compute.CreateInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values 
are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instances_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersCreateInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_create_instances") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_create_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.CreateInstancesInstanceGroupManagerRequest.pb(compute.CreateInstancesInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CreateInstancesInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.create_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instances_rest_bad_request(transport: str = 'rest', request_type=compute.CreateInstancesInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_create_instances_request_resource"] = {'instances': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instances(request) + + +def test_create_instances_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_create_instances_request_resource=compute.InstanceGroupManagersCreateInstancesRequest(instances=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/createInstances" % client.transport._host, args[1]) + + +def test_create_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instances( + compute.CreateInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_create_instances_request_resource=compute.InstanceGroupManagersCreateInstancesRequest(instances=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_create_instances_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CreateInstancesInstanceGroupManagerRequest, + dict, +]) +def test_create_instances_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_create_instances_request_resource"] = {'instances': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_create_instances_unary_rest_required_fields(request_type=compute.CreateInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instances_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersCreateInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instances_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_create_instances") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_create_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.CreateInstancesInstanceGroupManagerRequest.pb(compute.CreateInstancesInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CreateInstancesInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.create_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.CreateInstancesInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_create_instances_request_resource"] = {'instances': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instances_unary(request) + + +def test_create_instances_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_create_instances_request_resource=compute.InstanceGroupManagersCreateInstancesRequest(instances=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/createInstances" % client.transport._host, args[1]) + + +def test_create_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instances_unary( + compute.CreateInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_create_instances_request_resource=compute.InstanceGroupManagersCreateInstancesRequest(instances=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_create_instances_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstanceGroupManagerRequest, + dict, +]) +def test_delete_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInstanceGroupManagerRequest.pb(compute.DeleteInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_delete_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstanceGroupManagerRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInstanceGroupManagerRequest.pb(compute.DeleteInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_delete_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstancesInstanceGroupManagerRequest, + dict, +]) +def test_delete_instances_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_delete_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2'], 'skip_instances_on_validation_error': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_instances_rest_required_fields(request_type=compute.DeleteInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values 
are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instances_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.delete_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersDeleteInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_delete_instances") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_delete_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DeleteInstancesInstanceGroupManagerRequest.pb(compute.DeleteInstancesInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstancesInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instances_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstancesInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_delete_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2'], 'skip_instances_on_validation_error': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instances(request) + + +def test_delete_instances_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_delete_instances_request_resource=compute.InstanceGroupManagersDeleteInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deleteInstances" % client.transport._host, args[1]) + + +def test_delete_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instances( + compute.DeleteInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_delete_instances_request_resource=compute.InstanceGroupManagersDeleteInstancesRequest(instances=['instances_value']), + ) + + +def test_delete_instances_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstancesInstanceGroupManagerRequest, + dict, +]) +def test_delete_instances_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_delete_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2'], 'skip_instances_on_validation_error': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_instances_unary_rest_required_fields(request_type=compute.DeleteInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instances_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.delete_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersDeleteInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instances_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_delete_instances") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_delete_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DeleteInstancesInstanceGroupManagerRequest.pb(compute.DeleteInstancesInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstancesInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstancesInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_delete_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2'], 'skip_instances_on_validation_error': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instances_unary(request) + + +def test_delete_instances_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_delete_instances_request_resource=compute.InstanceGroupManagersDeleteInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deleteInstances" % client.transport._host, args[1]) + + +def test_delete_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instances_unary( + compute.DeleteInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_delete_instances_request_resource=compute.InstanceGroupManagersDeleteInstancesRequest(instances=['instances_value']), + ) + + +def test_delete_instances_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, + dict, +]) +def test_delete_per_instance_configs_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_delete_per_instance_configs_req_resource"] = {'names': ['names_value1', 'names_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_per_instance_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_per_instance_configs_rest_required_fields(request_type=compute.DeletePerInstanceConfigsInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify 
required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_per_instance_configs(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_per_instance_configs_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "instanceGroupManagersDeletePerInstanceConfigsReqResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_per_instance_configs_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_delete_per_instance_configs") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_delete_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.pb(compute.DeletePerInstanceConfigsInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_per_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_per_instance_configs_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePerInstanceConfigsInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_delete_per_instance_configs_req_resource"] = {'names': ['names_value1', 'names_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_per_instance_configs(request) + + +def test_delete_per_instance_configs_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_delete_per_instance_configs_req_resource=compute.InstanceGroupManagersDeletePerInstanceConfigsReq(names=['names_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_per_instance_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs" % client.transport._host, args[1]) + + +def test_delete_per_instance_configs_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_per_instance_configs( + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_delete_per_instance_configs_req_resource=compute.InstanceGroupManagersDeletePerInstanceConfigsReq(names=['names_value']), + ) + + +def test_delete_per_instance_configs_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, + dict, +]) +def test_delete_per_instance_configs_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_delete_per_instance_configs_req_resource"] = {'names': ['names_value1', 'names_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_per_instance_configs_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_per_instance_configs_unary_rest_required_fields(request_type=compute.DeletePerInstanceConfigsInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_per_instance_configs_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_per_instance_configs_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "instanceGroupManagersDeletePerInstanceConfigsReqResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_per_instance_configs_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_delete_per_instance_configs") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_delete_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.pb(compute.DeletePerInstanceConfigsInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_per_instance_configs_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_per_instance_configs_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePerInstanceConfigsInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_delete_per_instance_configs_req_resource"] = {'names': ['names_value1', 'names_value2']} + request = request_type(**request_init) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_per_instance_configs_unary(request) + + +def test_delete_per_instance_configs_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_delete_per_instance_configs_req_resource=compute.InstanceGroupManagersDeletePerInstanceConfigsReq(names=['names_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_per_instance_configs_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs" % client.transport._host, args[1]) + + +def test_delete_per_instance_configs_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_per_instance_configs_unary( + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_delete_per_instance_configs_req_resource=compute.InstanceGroupManagersDeletePerInstanceConfigsReq(names=['names_value']), + ) + + +def test_delete_per_instance_configs_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetInstanceGroupManagerRequest, + dict, +]) +def test_get_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManager( + base_instance_name='base_instance_name_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + instance_group='instance_group_value', + instance_template='instance_template_value', + kind='kind_value', + list_managed_instances_results='list_managed_instances_results_value', + name='name_value', + region='region_value', + self_link='self_link_value', + target_pools=['target_pools_value'], + target_size=1185, + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InstanceGroupManager) + assert response.base_instance_name == 'base_instance_name_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.instance_group == 'instance_group_value' + assert response.instance_template == 'instance_template_value' + assert response.kind == 'kind_value' + assert response.list_managed_instances_results == 'list_managed_instances_results_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.target_pools == ['target_pools_value'] + assert response.target_size == 1185 + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManager() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInstanceGroupManagerRequest.pb(compute.GetInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupManager.to_json(compute.InstanceGroupManager()) + + request = compute.GetInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManager() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManager() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_get_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInstanceGroupManagerRequest, + dict, +]) +def test_insert_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 
'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + 
response = client.insert(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields 
with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManagerResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInstanceGroupManagerRequest.pb(compute.InsertInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 
'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + + +def test_insert_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInstanceGroupManagerRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': 
{'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManagerResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInstanceGroupManagerRequest.pb(compute.InsertInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 
'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + + +def test_insert_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListInstanceGroupManagersRequest, + dict, +]) +def test_list_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListInstanceGroupManagersRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManagerList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListInstanceGroupManagersRequest.pb(compute.ListInstanceGroupManagersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupManagerList.to_json(compute.InstanceGroupManagerList()) + + request = compute.ListInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManagerList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListInstanceGroupManagersRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListInstanceGroupManagersRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupManagerList( + items=[ + compute.InstanceGroupManager(), + compute.InstanceGroupManager(), + compute.InstanceGroupManager(), + ], + next_page_token='abc', + ), + compute.InstanceGroupManagerList( + items=[], + next_page_token='def', + ), + compute.InstanceGroupManagerList( + items=[ + compute.InstanceGroupManager(), + ], + next_page_token='ghi', + ), + compute.InstanceGroupManagerList( + items=[ + compute.InstanceGroupManager(), + compute.InstanceGroupManager(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceGroupManagerList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceGroupManager) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize("request_type", [ + compute.ListErrorsInstanceGroupManagersRequest, + dict, +]) +def test_list_errors_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagersListErrorsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagersListErrorsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_errors(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListErrorsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_errors_rest_required_fields(request_type=compute.ListErrorsInstanceGroupManagersRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_errors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_errors._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagersListErrorsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.InstanceGroupManagersListErrorsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_errors(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_errors_rest_unset_required_fields():
+    transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_errors._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instanceGroupManager", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_errors_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(),
+        )
+    client = InstanceGroupManagersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_list_errors") as post, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_list_errors") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.ListErrorsInstanceGroupManagersRequest.pb(compute.ListErrorsInstanceGroupManagersRequest())
+        
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupManagersListErrorsResponse.to_json(compute.InstanceGroupManagersListErrorsResponse()) + + request = compute.ListErrorsInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManagersListErrorsResponse() + + client.list_errors(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_errors_rest_bad_request(transport: str = 'rest', request_type=compute.ListErrorsInstanceGroupManagersRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_errors(request) + + +def test_list_errors_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManagersListErrorsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagersListErrorsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_errors(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listErrors" % client.transport._host, args[1]) + + +def test_list_errors_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_errors( + compute.ListErrorsInstanceGroupManagersRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_list_errors_rest_pager(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupManagersListErrorsResponse( + items=[ + compute.InstanceManagedByIgmError(), + compute.InstanceManagedByIgmError(), + compute.InstanceManagedByIgmError(), + ], + next_page_token='abc', + ), + compute.InstanceGroupManagersListErrorsResponse( + items=[], + next_page_token='def', + ), + compute.InstanceGroupManagersListErrorsResponse( + items=[ + compute.InstanceManagedByIgmError(), + ], + next_page_token='ghi', + ), + compute.InstanceGroupManagersListErrorsResponse( + items=[ + compute.InstanceManagedByIgmError(), + compute.InstanceManagedByIgmError(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceGroupManagersListErrorsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + pager = client.list_errors(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceManagedByIgmError) + for i in results) + + pages = list(client.list_errors(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListManagedInstancesInstanceGroupManagersRequest, + dict, +]) +def test_list_managed_instances_rest(request_type): + client = InstanceGroupManagersClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagersListManagedInstancesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagersListManagedInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_managed_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListManagedInstancesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_managed_instances_rest_required_fields(request_type=compute.ListManagedInstancesInstanceGroupManagersRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_managed_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_managed_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagersListManagedInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.InstanceGroupManagersListManagedInstancesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_managed_instances(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_managed_instances_rest_unset_required_fields():
+    transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_managed_instances._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instanceGroupManager", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_managed_instances_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(),
+        )
+    client = InstanceGroupManagersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_list_managed_instances") as post, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_list_managed_instances") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = 
compute.ListManagedInstancesInstanceGroupManagersRequest.pb(compute.ListManagedInstancesInstanceGroupManagersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupManagersListManagedInstancesResponse.to_json(compute.InstanceGroupManagersListManagedInstancesResponse()) + + request = compute.ListManagedInstancesInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManagersListManagedInstancesResponse() + + client.list_managed_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_managed_instances_rest_bad_request(transport: str = 'rest', request_type=compute.ListManagedInstancesInstanceGroupManagersRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_managed_instances(request) + + +def test_list_managed_instances_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagersListManagedInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagersListManagedInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_managed_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listManagedInstances" % client.transport._host, args[1]) + + +def test_list_managed_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_managed_instances( + compute.ListManagedInstancesInstanceGroupManagersRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_list_managed_instances_rest_pager(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupManagersListManagedInstancesResponse( + managed_instances=[ + compute.ManagedInstance(), + compute.ManagedInstance(), + compute.ManagedInstance(), + ], + next_page_token='abc', + ), + compute.InstanceGroupManagersListManagedInstancesResponse( + managed_instances=[], + next_page_token='def', + ), + compute.InstanceGroupManagersListManagedInstancesResponse( + managed_instances=[ + compute.ManagedInstance(), + ], + next_page_token='ghi', + ), + compute.InstanceGroupManagersListManagedInstancesResponse( + managed_instances=[ + compute.ManagedInstance(), + compute.ManagedInstance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceGroupManagersListManagedInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + pager = client.list_managed_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.ManagedInstance) + for i in results) + + pages = list(client.list_managed_instances(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListPerInstanceConfigsInstanceGroupManagersRequest, + dict, +]) +def test_list_per_instance_configs_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request 
that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_per_instance_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPerInstanceConfigsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_per_instance_configs_rest_required_fields(request_type=compute.ListPerInstanceConfigsInstanceGroupManagersRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_per_instance_configs(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_per_instance_configs_rest_unset_required_fields():
+    transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_per_instance_configs._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instanceGroupManager", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_per_instance_configs_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(),
+        )
+    client = InstanceGroupManagersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_list_per_instance_configs") as post, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_list_per_instance_configs") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = 
compute.ListPerInstanceConfigsInstanceGroupManagersRequest.pb(compute.ListPerInstanceConfigsInstanceGroupManagersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupManagersListPerInstanceConfigsResp.to_json(compute.InstanceGroupManagersListPerInstanceConfigsResp()) + + request = compute.ListPerInstanceConfigsInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp() + + client.list_per_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_per_instance_configs_rest_bad_request(transport: str = 'rest', request_type=compute.ListPerInstanceConfigsInstanceGroupManagersRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_per_instance_configs(request) + + +def test_list_per_instance_configs_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_per_instance_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs" % client.transport._host, args[1]) + + +def test_list_per_instance_configs_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_per_instance_configs( + compute.ListPerInstanceConfigsInstanceGroupManagersRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_list_per_instance_configs_rest_pager(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupManagersListPerInstanceConfigsResp( + items=[ + compute.PerInstanceConfig(), + compute.PerInstanceConfig(), + compute.PerInstanceConfig(), + ], + next_page_token='abc', + ), + compute.InstanceGroupManagersListPerInstanceConfigsResp( + items=[], + next_page_token='def', + ), + compute.InstanceGroupManagersListPerInstanceConfigsResp( + items=[ + compute.PerInstanceConfig(), + ], + next_page_token='ghi', + ), + compute.InstanceGroupManagersListPerInstanceConfigsResp( + items=[ + compute.PerInstanceConfig(), + compute.PerInstanceConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceGroupManagersListPerInstanceConfigsResp.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + pager = client.list_per_instance_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.PerInstanceConfig) + for i in results) + + pages = list(client.list_per_instance_configs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchInstanceGroupManagerRequest, + dict, +]) +def test_patch_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 
'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.patch(request)
+
+            # No default-valued query params should be sent on the wire.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_patch_rest_unset_required_fields():
+    # NOTE(review): the transport's ``credentials`` parameter expects a
+    # credentials *instance*; the generator emitted the bare class here.
+    transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.patch._get_unset_required_fields({})
+    # Required fields left unset == (query params) & (required fields).
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagerResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_patch_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(),
+    )
+    client = InstanceGroupManagersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_patch") as post, \
+         mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_patch") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.PatchInstanceGroupManagerRequest.pb(compute.PatchInstanceGroupManagerRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body":
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 
'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + + +def test_patch_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchInstanceGroupManagerRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 
'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.patch_unary(request)
+
+            # No default-valued query params should be sent on the wire.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_patch_unary_rest_unset_required_fields():
+    # NOTE(review): the transport's ``credentials`` parameter expects a
+    # credentials *instance*; the generator emitted the bare class here.
+    transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    # patch_unary shares the underlying ``patch`` call on the transport.
+    unset_fields = transport.patch._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagerResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_patch_unary_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(),
+    )
+    client = InstanceGroupManagersClient(transport=transport)
+    with
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchInstanceGroupManagerRequest.pb(compute.PatchInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 
'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + + +def test_patch_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, + dict, +]) +def test_patch_per_instance_configs_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_patch_per_instance_configs_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_per_instance_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_per_instance_configs_rest_required_fields(request_type=compute.PatchPerInstanceConfigsInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_per_instance_configs(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_per_instance_configs_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersPatchPerInstanceConfigsReqResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_per_instance_configs_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_patch_per_instance_configs") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_patch_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.pb(compute.PatchPerInstanceConfigsInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_per_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_per_instance_configs_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPerInstanceConfigsInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_patch_per_instance_configs_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_per_instance_configs(request) + + +def test_patch_per_instance_configs_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_patch_per_instance_configs_req_resource=compute.InstanceGroupManagersPatchPerInstanceConfigsReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_per_instance_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs" % client.transport._host, args[1]) + + +def test_patch_per_instance_configs_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_per_instance_configs( + compute.PatchPerInstanceConfigsInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_patch_per_instance_configs_req_resource=compute.InstanceGroupManagersPatchPerInstanceConfigsReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_patch_per_instance_configs_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, + dict, +]) +def test_patch_per_instance_configs_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_patch_per_instance_configs_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_per_instance_configs_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_per_instance_configs_unary_rest_required_fields(request_type=compute.PatchPerInstanceConfigsInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_per_instance_configs_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_per_instance_configs_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersPatchPerInstanceConfigsReqResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_per_instance_configs_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_patch_per_instance_configs") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_patch_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.pb(compute.PatchPerInstanceConfigsInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_per_instance_configs_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_per_instance_configs_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPerInstanceConfigsInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_patch_per_instance_configs_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_per_instance_configs_unary(request) + + +def test_patch_per_instance_configs_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_patch_per_instance_configs_req_resource=compute.InstanceGroupManagersPatchPerInstanceConfigsReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_per_instance_configs_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs" % client.transport._host, args[1]) + + +def test_patch_per_instance_configs_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_per_instance_configs_unary( + compute.PatchPerInstanceConfigsInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_patch_per_instance_configs_req_resource=compute.InstanceGroupManagersPatchPerInstanceConfigsReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_patch_per_instance_configs_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RecreateInstancesInstanceGroupManagerRequest, + dict, +]) +def test_recreate_instances_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_recreate_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.recreate_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_recreate_instances_rest_required_fields(request_type=compute.RecreateInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).recreate_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default 
values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).recreate_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.recreate_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_recreate_instances_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.recreate_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersRecreateInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_recreate_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_recreate_instances") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_recreate_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.RecreateInstancesInstanceGroupManagerRequest.pb(compute.RecreateInstancesInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RecreateInstancesInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.recreate_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_recreate_instances_rest_bad_request(transport: str = 'rest', request_type=compute.RecreateInstancesInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_recreate_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recreate_instances(request) + + +def test_recreate_instances_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_recreate_instances_request_resource=compute.InstanceGroupManagersRecreateInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.recreate_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/recreateInstances" % client.transport._host, args[1]) + + +def test_recreate_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.recreate_instances( + compute.RecreateInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_recreate_instances_request_resource=compute.InstanceGroupManagersRecreateInstancesRequest(instances=['instances_value']), + ) + + +def test_recreate_instances_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RecreateInstancesInstanceGroupManagerRequest, + dict, +]) +def test_recreate_instances_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_recreate_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.recreate_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_recreate_instances_unary_rest_required_fields(request_type=compute.RecreateInstancesInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).recreate_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).recreate_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.recreate_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_recreate_instances_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.recreate_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersRecreateInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_recreate_instances_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_recreate_instances") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_recreate_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.RecreateInstancesInstanceGroupManagerRequest.pb(compute.RecreateInstancesInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RecreateInstancesInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.recreate_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_recreate_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RecreateInstancesInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_recreate_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recreate_instances_unary(request) + + +def test_recreate_instances_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_recreate_instances_request_resource=compute.InstanceGroupManagersRecreateInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.recreate_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/recreateInstances" % client.transport._host, args[1]) + + +def test_recreate_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.recreate_instances_unary( + compute.RecreateInstancesInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_recreate_instances_request_resource=compute.InstanceGroupManagersRecreateInstancesRequest(instances=['instances_value']), + ) + + +def test_recreate_instances_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeInstanceGroupManagerRequest, + dict, +]) +def test_resize_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_resize_rest_required_fields(request_type=compute.ResizeInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["size"] = 0 + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "size" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # 
verify required fields with default values are now present + assert "size" in jsonified_request + assert jsonified_request["size"] == request_init["size"] + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["size"] = 443 + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "size", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "size" in jsonified_request + assert jsonified_request["size"] == 443 + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize(request) + + expected_params = [ + ( + "size", + str(0), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "size", )) & set(("instanceGroupManager", "project", "size", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_resize") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeInstanceGroupManagerRequest.pb(compute.ResizeInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + +def test_resize_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + size=443, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resize" % client.transport._host, args[1]) + + +def test_resize_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize( + compute.ResizeInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + size=443, + ) + + +def test_resize_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeInstanceGroupManagerRequest, + dict, +]) +def test_resize_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_resize_unary_rest_required_fields(request_type=compute.ResizeInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["size"] = 0 + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "size" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "size" in jsonified_request + assert jsonified_request["size"] == request_init["size"] + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["size"] = 443 + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", "size", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "size" in jsonified_request + assert jsonified_request["size"] == 443 + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize_unary(request) + + expected_params = [ + ( + "size", + str(0), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "size", )) & set(("instanceGroupManager", "project", "size", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_resize") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeInstanceGroupManagerRequest.pb(compute.ResizeInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize_unary(request) + + +def test_resize_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + size=443, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resize" % client.transport._host, args[1]) + + +def test_resize_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize_unary( + compute.ResizeInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + size=443, + ) + + +def test_resize_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetInstanceTemplateInstanceGroupManagerRequest, + dict, +]) +def test_set_instance_template_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_set_instance_template_request_resource"] = {'instance_template': 'instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_instance_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_instance_template_rest_required_fields(request_type=compute.SetInstanceTemplateInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_instance_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_instance_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_instance_template(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_instance_template_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_instance_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersSetInstanceTemplateRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_instance_template_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_set_instance_template") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_set_instance_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetInstanceTemplateInstanceGroupManagerRequest.pb(compute.SetInstanceTemplateInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetInstanceTemplateInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_instance_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_instance_template_rest_bad_request(transport: str = 'rest', request_type=compute.SetInstanceTemplateInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_set_instance_template_request_resource"] = {'instance_template': 'instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_instance_template(request) + + +def test_set_instance_template_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_set_instance_template_request_resource=compute.InstanceGroupManagersSetInstanceTemplateRequest(instance_template='instance_template_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_instance_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate" % client.transport._host, args[1]) + + +def test_set_instance_template_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_instance_template( + compute.SetInstanceTemplateInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_set_instance_template_request_resource=compute.InstanceGroupManagersSetInstanceTemplateRequest(instance_template='instance_template_value'), + ) + + +def test_set_instance_template_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetInstanceTemplateInstanceGroupManagerRequest, + dict, +]) +def test_set_instance_template_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_set_instance_template_request_resource"] = {'instance_template': 'instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_instance_template_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_instance_template_unary_rest_required_fields(request_type=compute.SetInstanceTemplateInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_instance_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_instance_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_instance_template_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_instance_template_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_instance_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersSetInstanceTemplateRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_instance_template_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_set_instance_template") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_set_instance_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetInstanceTemplateInstanceGroupManagerRequest.pb(compute.SetInstanceTemplateInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetInstanceTemplateInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_instance_template_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_instance_template_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetInstanceTemplateInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_set_instance_template_request_resource"] = {'instance_template': 'instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_instance_template_unary(request) + + +def test_set_instance_template_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_set_instance_template_request_resource=compute.InstanceGroupManagersSetInstanceTemplateRequest(instance_template='instance_template_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_instance_template_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate" % client.transport._host, args[1]) + + +def test_set_instance_template_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_instance_template_unary( + compute.SetInstanceTemplateInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_set_instance_template_request_resource=compute.InstanceGroupManagersSetInstanceTemplateRequest(instance_template='instance_template_value'), + ) + + +def test_set_instance_template_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTargetPoolsInstanceGroupManagerRequest, + dict, +]) +def test_set_target_pools_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_set_target_pools_request_resource"] = {'fingerprint': 'fingerprint_value', 'target_pools': ['target_pools_value1', 'target_pools_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_target_pools(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_target_pools_rest_required_fields(request_type=compute.SetTargetPoolsInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values 
are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_target_pools(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_target_pools_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_target_pools._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersSetTargetPoolsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_target_pools_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_set_target_pools") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_set_target_pools") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetTargetPoolsInstanceGroupManagerRequest.pb(compute.SetTargetPoolsInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTargetPoolsInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_target_pools(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_target_pools_rest_bad_request(transport: str = 'rest', request_type=compute.SetTargetPoolsInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_set_target_pools_request_resource"] = {'fingerprint': 'fingerprint_value', 'target_pools': ['target_pools_value1', 'target_pools_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target_pools(request) + + +def test_set_target_pools_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_set_target_pools_request_resource=compute.InstanceGroupManagersSetTargetPoolsRequest(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_target_pools(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setTargetPools" % client.transport._host, args[1]) + + +def test_set_target_pools_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_target_pools( + compute.SetTargetPoolsInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_set_target_pools_request_resource=compute.InstanceGroupManagersSetTargetPoolsRequest(fingerprint='fingerprint_value'), + ) + + +def test_set_target_pools_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTargetPoolsInstanceGroupManagerRequest, + dict, +]) +def test_set_target_pools_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_set_target_pools_request_resource"] = {'fingerprint': 'fingerprint_value', 'target_pools': ['target_pools_value1', 'target_pools_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_target_pools_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_target_pools_unary_rest_required_fields(request_type=compute.SetTargetPoolsInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_target_pools_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_target_pools_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_target_pools._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersSetTargetPoolsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_target_pools_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_set_target_pools") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_set_target_pools") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetTargetPoolsInstanceGroupManagerRequest.pb(compute.SetTargetPoolsInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTargetPoolsInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_target_pools_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_target_pools_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetTargetPoolsInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_set_target_pools_request_resource"] = {'fingerprint': 'fingerprint_value', 'target_pools': ['target_pools_value1', 'target_pools_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target_pools_unary(request) + + +def test_set_target_pools_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_set_target_pools_request_resource=compute.InstanceGroupManagersSetTargetPoolsRequest(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_target_pools_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setTargetPools" % client.transport._host, args[1]) + + +def test_set_target_pools_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_target_pools_unary( + compute.SetTargetPoolsInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_set_target_pools_request_resource=compute.InstanceGroupManagersSetTargetPoolsRequest(fingerprint='fingerprint_value'), + ) + + +def test_set_target_pools_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, + dict, +]) +def test_update_per_instance_configs_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_update_per_instance_configs_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_per_instance_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_per_instance_configs_rest_required_fields(request_type=compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify 
required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_per_instance_configs(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_per_instance_configs_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersUpdatePerInstanceConfigsReqResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_per_instance_configs_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_update_per_instance_configs") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_update_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.pb(compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_per_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_per_instance_configs_rest_bad_request(transport: str = 'rest', request_type=compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_update_per_instance_configs_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_per_instance_configs(request) + + +def test_update_per_instance_configs_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_update_per_instance_configs_req_resource=compute.InstanceGroupManagersUpdatePerInstanceConfigsReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_per_instance_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs" % client.transport._host, args[1]) + + +def test_update_per_instance_configs_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_per_instance_configs( + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_update_per_instance_configs_req_resource=compute.InstanceGroupManagersUpdatePerInstanceConfigsReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_update_per_instance_configs_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, + dict, +]) +def test_update_per_instance_configs_unary_rest(request_type): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_update_per_instance_configs_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_per_instance_configs_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_per_instance_configs_unary_rest_required_fields(request_type=compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_per_instance_configs_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_per_instance_configs_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagersUpdatePerInstanceConfigsReqResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_per_instance_configs_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupManagersRestInterceptor(), + ) + client = InstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "post_update_per_instance_configs") as post, \ + mock.patch.object(transports.InstanceGroupManagersRestInterceptor, "pre_update_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message 
= compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.pb(compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_per_instance_configs_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_per_instance_configs_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_managers_update_per_instance_configs_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_per_instance_configs_unary(request) + + +def test_update_per_instance_configs_unary_rest_flattened(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_update_per_instance_configs_req_resource=compute.InstanceGroupManagersUpdatePerInstanceConfigsReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_per_instance_configs_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs" % client.transport._host, args[1]) + + +def test_update_per_instance_configs_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_per_instance_configs_unary( + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest(), + project='project_value', + zone='zone_value', + instance_group_manager='instance_group_manager_value', + instance_group_managers_update_per_instance_configs_req_resource=compute.InstanceGroupManagersUpdatePerInstanceConfigsReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_update_per_instance_configs_unary_rest_error(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupManagersClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceGroupManagersClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceGroupManagersClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupManagersClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InstanceGroupManagersClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.InstanceGroupManagersRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = InstanceGroupManagersClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_instance_group_managers_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstanceGroupManagersTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_instance_group_managers_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.instance_group_managers.transports.InstanceGroupManagersTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InstanceGroupManagersTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'abandon_instances', + 'aggregated_list', + 'apply_updates_to_instances', + 'create_instances', + 'delete', + 'delete_instances', + 'delete_per_instance_configs', + 'get', + 'insert', + 'list', + 'list_errors', + 'list_managed_instances', + 'list_per_instance_configs', + 'patch', + 'patch_per_instance_configs', + 'recreate_instances', + 'resize', + 'set_instance_template', + 'set_target_pools', + 'update_per_instance_configs', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instance_group_managers_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.instance_group_managers.transports.InstanceGroupManagersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceGroupManagersTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_instance_group_managers_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.instance_group_managers.transports.InstanceGroupManagersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceGroupManagersTransport() + adc.assert_called_once() + + +def test_instance_group_managers_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstanceGroupManagersClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_instance_group_managers_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.InstanceGroupManagersRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_group_managers_host_no_port(transport_name): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_group_managers_host_with_port(transport_name): + client = 
InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_group_managers_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InstanceGroupManagersClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InstanceGroupManagersClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.abandon_instances._session + session2 = client2.transport.abandon_instances._session + assert session1 != session2 + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.apply_updates_to_instances._session + session2 = client2.transport.apply_updates_to_instances._session + assert session1 != session2 + session1 = client1.transport.create_instances._session + session2 = client2.transport.create_instances._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.delete_instances._session + session2 = client2.transport.delete_instances._session + assert session1 != session2 + session1 = client1.transport.delete_per_instance_configs._session + session2 = client2.transport.delete_per_instance_configs._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = 
client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_errors._session + session2 = client2.transport.list_errors._session + assert session1 != session2 + session1 = client1.transport.list_managed_instances._session + session2 = client2.transport.list_managed_instances._session + assert session1 != session2 + session1 = client1.transport.list_per_instance_configs._session + session2 = client2.transport.list_per_instance_configs._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.patch_per_instance_configs._session + session2 = client2.transport.patch_per_instance_configs._session + assert session1 != session2 + session1 = client1.transport.recreate_instances._session + session2 = client2.transport.recreate_instances._session + assert session1 != session2 + session1 = client1.transport.resize._session + session2 = client2.transport.resize._session + assert session1 != session2 + session1 = client1.transport.set_instance_template._session + session2 = client2.transport.set_instance_template._session + assert session1 != session2 + session1 = client1.transport.set_target_pools._session + session2 = client2.transport.set_target_pools._session + assert session1 != session2 + session1 = client1.transport.update_per_instance_configs._session + session2 = client2.transport.update_per_instance_configs._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = InstanceGroupManagersClient.common_billing_account_path(billing_account) + assert expected == actual + + +def 
test_parse_common_billing_account_path():
    # Round-trip: build the canonical billingAccounts/... path from components,
    # then parse it back and expect the original components.
    expected = {
        "billing_account": "clam",
    }
    path = InstanceGroupManagersClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = InstanceGroupManagersClient.parse_common_billing_account_path(path)
    assert expected == actual

def test_common_folder_path():
    # folders/{folder} resource-path helper.
    folder = "whelk"
    expected = "folders/{folder}".format(folder=folder, )
    actual = InstanceGroupManagersClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    expected = {
        "folder": "octopus",
    }
    path = InstanceGroupManagersClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = InstanceGroupManagersClient.parse_common_folder_path(path)
    assert expected == actual

def test_common_organization_path():
    # organizations/{organization} resource-path helper.
    organization = "oyster"
    expected = "organizations/{organization}".format(organization=organization, )
    actual = InstanceGroupManagersClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    expected = {
        "organization": "nudibranch",
    }
    path = InstanceGroupManagersClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = InstanceGroupManagersClient.parse_common_organization_path(path)
    assert expected == actual

def test_common_project_path():
    # projects/{project} resource-path helper.
    project = "cuttlefish"
    expected = "projects/{project}".format(project=project, )
    actual = InstanceGroupManagersClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    expected = {
        "project": "mussel",
    }
    path = InstanceGroupManagersClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = InstanceGroupManagersClient.parse_common_project_path(path)
    assert expected == actual

def test_common_location_path():
    # projects/{project}/locations/{location} resource-path helper.
    project = "winkle"
    location = "nautilus"
    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
    actual = InstanceGroupManagersClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = InstanceGroupManagersClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = InstanceGroupManagersClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    # A custom ClientInfo must be forwarded to the transport's
    # _prep_wrapped_messages, whether the client or the transport is built.
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.InstanceGroupManagersTransport, '_prep_wrapped_messages') as prep:
        client = InstanceGroupManagersClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.InstanceGroupManagersTransport, '_prep_wrapped_messages') as prep:
        transport_class = InstanceGroupManagersClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    # Exiting the client's context manager must close the transport's
    # underlying session exactly once (maps transport name -> attribute).
    transports = {
        "rest": "_session",
    }

    for transport, close_name in transports.items():
        client = InstanceGroupManagersClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()

def test_client_ctx():
    transports = [
        'rest',
    ]
    for transport in transports:
        client = InstanceGroupManagersClient(
credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InstanceGroupManagersClient, transports.InstanceGroupManagersRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_groups.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_groups.py new file mode 100644 index 000000000..42acc3c86 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_groups.py @@ -0,0 +1,4849 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.instance_groups import InstanceGroupsClient +from google.cloud.compute_v1.services.instance_groups import pagers +from google.cloud.compute_v1.services.instance_groups import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint 
is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    # For localhost default endpoints, substitute a googleapis-style host so the
    # mTLS-endpoint derivation below has something to transform.
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # None passes through unchanged; googleapis hosts gain the ".mtls." infix
    # (idempotently); non-Google hosts are returned as-is.
    assert InstanceGroupsClient._get_default_mtls_endpoint(None) is None
    assert InstanceGroupsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert InstanceGroupsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert InstanceGroupsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert InstanceGroupsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert InstanceGroupsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (InstanceGroupsClient, "rest"),
])
def test_instance_groups_client_from_service_account_info(client_class, transport_name):
    # from_service_account_info must route through the credentials factory and
    # attach the resulting credentials to the transport.
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.InstanceGroupsRestTransport, "rest"),
])
def test_instance_groups_client_service_account_always_use_jwt(transport_class, transport_name):
    # Service-account credentials should opt in to self-signed JWTs only when
    # always_use_jwt_access=True is passed to the transport.
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (InstanceGroupsClient, "rest"),
])
def test_instance_groups_client_from_service_account_file(client_class, transport_name):
    # Both from_service_account_file and its from_service_account_json alias
    # must route through the credentials factory.
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_instance_groups_client_get_transport_class():
    # REST is the only transport generated for this client; both the default
    # and the explicit "rest" lookup must resolve to it.
    transport = InstanceGroupsClient.get_transport_class()
    available_transports = [
        transports.InstanceGroupsRestTransport,
    ]
    assert transport in available_transports

    transport = InstanceGroupsClient.get_transport_class("rest")
    assert transport == transports.InstanceGroupsRestTransport

@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (InstanceGroupsClient, transports.InstanceGroupsRestTransport, "rest"),
])
# Patch DEFAULT_ENDPOINT via modify_default_endpoint so mTLS derivation is testable.
@mock.patch.object(InstanceGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceGroupsClient))
def test_instance_groups_client_client_options(client_class, transport_class, transport_name):
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(InstanceGroupsClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(InstanceGroupsClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided: the custom host must be passed
    # through to the transport constructor unchanged.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (InstanceGroupsClient, transports.InstanceGroupsRestTransport, "rest", "true"), + (InstanceGroupsClient, transports.InstanceGroupsRestTransport, "rest", "false"), +]) +@mock.patch.object(InstanceGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceGroupsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instance_groups_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InstanceGroupsClient +]) +@mock.patch.object(InstanceGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceGroupsClient)) +def test_instance_groups_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstanceGroupsClient, transports.InstanceGroupsRestTransport, "rest"), +]) +def test_instance_groups_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InstanceGroupsClient, transports.InstanceGroupsRestTransport, "rest", None), +]) +def test_instance_groups_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddInstancesInstanceGroupRequest, + dict, +]) +def test_add_instances_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_add_instances_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_instances_rest_required_fields(request_type=compute.AddInstancesInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_instances_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "instanceGroupsAddInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_add_instances") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_add_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddInstancesInstanceGroupRequest.pb(compute.AddInstancesInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddInstancesInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_instances_rest_bad_request(transport: str = 'rest', request_type=compute.AddInstancesInstanceGroupRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_add_instances_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_instances(request) + + +def test_add_instances_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_add_instances_request_resource=compute.InstanceGroupsAddInstancesRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/addInstances" % client.transport._host, args[1]) + + +def test_add_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_instances( + compute.AddInstancesInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_add_instances_request_resource=compute.InstanceGroupsAddInstancesRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + + +def test_add_instances_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddInstancesInstanceGroupRequest, + dict, +]) +def test_add_instances_unary_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_add_instances_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_instances_unary_rest_required_fields(request_type=compute.AddInstancesInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_instances_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "instanceGroupsAddInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_instances_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_add_instances") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_add_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddInstancesInstanceGroupRequest.pb(compute.AddInstancesInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddInstancesInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddInstancesInstanceGroupRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_add_instances_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_instances_unary(request) + + +def test_add_instances_unary_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_add_instances_request_resource=compute.InstanceGroupsAddInstancesRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/addInstances" % client.transport._host, args[1]) + + +def test_add_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_instances_unary( + compute.AddInstancesInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_add_instances_request_resource=compute.InstanceGroupsAddInstancesRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + + +def test_add_instances_unary_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListInstanceGroupsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListInstanceGroupsRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListInstanceGroupsRequest.pb(compute.AggregatedListInstanceGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupAggregatedList.to_json(compute.InstanceGroupAggregatedList()) + + request = compute.AggregatedListInstanceGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListInstanceGroupsRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/instanceGroups" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListInstanceGroupsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupAggregatedList( + items={ + 'a':compute.InstanceGroupsScopedList(), + 'b':compute.InstanceGroupsScopedList(), + 'c':compute.InstanceGroupsScopedList(), + }, + next_page_token='abc', + ), + compute.InstanceGroupAggregatedList( + items={}, + next_page_token='def', + ), + compute.InstanceGroupAggregatedList( + items={ + 'g':compute.InstanceGroupsScopedList(), + }, + next_page_token='ghi', + ), + compute.InstanceGroupAggregatedList( + items={ + 'h':compute.InstanceGroupsScopedList(), + 'i':compute.InstanceGroupsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceGroupAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.InstanceGroupsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.InstanceGroupsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.InstanceGroupsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstanceGroupRequest, + dict, +]) +def 
test_delete_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInstanceGroupRequest.pb(compute.DeleteInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstanceGroupRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + ) + + +def test_delete_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstanceGroupRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInstanceGroupRequest.pb(compute.DeleteInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 
+ req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstanceGroupRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + ) + + +def test_delete_unary_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetInstanceGroupRequest, + dict, +]) +def test_get_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroup( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + network='network_value', + region='region_value', + self_link='self_link_value', + size=443, + subnetwork='subnetwork_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InstanceGroup) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.size == 443 + assert response.subnetwork == 'subnetwork_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + 
assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroup", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + 
interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInstanceGroupRequest.pb(compute.GetInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroup.to_json(compute.InstanceGroup()) + + request = compute.GetInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroup() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetInstanceGroupRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + ) + + +def test_get_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInstanceGroupRequest, + dict, +]) +def test_insert_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_group_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInstanceGroupRequest.pb(compute.InsertInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInstanceGroupRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_group_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_resource=compute.InstanceGroup(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group_resource=compute.InstanceGroup(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInstanceGroupRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_group_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInstanceGroupRequest.pb(compute.InsertInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInstanceGroupRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_group_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group_resource=compute.InstanceGroup(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group_resource=compute.InstanceGroup(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListInstanceGroupsRequest, + dict, +]) +def test_list_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListInstanceGroupsRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListInstanceGroupsRequest.pb(compute.ListInstanceGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupList.to_json(compute.InstanceGroupList()) + + request = compute.ListInstanceGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListInstanceGroupsRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListInstanceGroupsRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupList( + items=[ + compute.InstanceGroup(), + compute.InstanceGroup(), + compute.InstanceGroup(), + ], + next_page_token='abc', + ), + compute.InstanceGroupList( + items=[], + next_page_token='def', + ), + compute.InstanceGroupList( + items=[ + compute.InstanceGroup(), + ], + next_page_token='ghi', + ), + compute.InstanceGroupList( + items=[ + compute.InstanceGroup(), + compute.InstanceGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceGroupList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceGroup) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListInstancesInstanceGroupsRequest, + dict, +]) +def test_list_instances_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_list_instances_request_resource"] = {'instance_state': 'instance_state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupsListInstances( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_instances_rest_required_fields(request_type=compute.ListInstancesInstanceGroupsRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupsListInstances() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instanceGroup", "instanceGroupsListInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.ListInstancesInstanceGroupsRequest.pb(compute.ListInstancesInstanceGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupsListInstances.to_json(compute.InstanceGroupsListInstances()) + + request = compute.ListInstancesInstanceGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupsListInstances() + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=compute.ListInstancesInstanceGroupsRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_list_instances_request_resource"] = {'instance_state': 'instance_state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupsListInstances() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_list_instances_request_resource=compute.InstanceGroupsListInstancesRequest(instance_state='instance_state_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/listInstances" % client.transport._host, args[1]) + + +def test_list_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_instances( + compute.ListInstancesInstanceGroupsRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_list_instances_request_resource=compute.InstanceGroupsListInstancesRequest(instance_state='instance_state_value'), + ) + + +def test_list_instances_rest_pager(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupsListInstances( + items=[ + compute.InstanceWithNamedPorts(), + compute.InstanceWithNamedPorts(), + compute.InstanceWithNamedPorts(), + ], + next_page_token='abc', + ), + compute.InstanceGroupsListInstances( + items=[], + next_page_token='def', + ), + compute.InstanceGroupsListInstances( + items=[ + compute.InstanceWithNamedPorts(), + ], + next_page_token='ghi', + ), + compute.InstanceGroupsListInstances( + items=[ + compute.InstanceWithNamedPorts(), + compute.InstanceWithNamedPorts(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceGroupsListInstances.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + sample_request["instance_groups_list_instances_request_resource"] = 
compute.InstanceGroupsListInstancesRequest(instance_state='instance_state_value') + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceWithNamedPorts) + for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveInstancesInstanceGroupRequest, + dict, +]) +def test_remove_instances_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_remove_instances_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_instances_rest_required_fields(request_type=compute.RemoveInstancesInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_instances_rest_unset_required_fields(): + transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "instanceGroupsRemoveInstancesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(), + ) + client = InstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_remove_instances") as post, \ + mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_remove_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveInstancesInstanceGroupRequest.pb(compute.RemoveInstancesInstanceGroupRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveInstancesInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_instances_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveInstancesInstanceGroupRequest): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_remove_instances_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_instances(request) + + +def test_remove_instances_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_remove_instances_request_resource=compute.InstanceGroupsRemoveInstancesRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/removeInstances" % client.transport._host, args[1]) + + +def test_remove_instances_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_instances( + compute.RemoveInstancesInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_remove_instances_request_resource=compute.InstanceGroupsRemoveInstancesRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + + +def test_remove_instances_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveInstancesInstanceGroupRequest, + dict, +]) +def test_remove_instances_unary_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_remove_instances_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_instances_unary_rest_required_fields(request_type=compute.RemoveInstancesInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.remove_instances_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_remove_instances_unary_rest_unset_required_fields():
+    # NOTE: credentials must be an instance, not the class itself.
+    transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.remove_instances._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "instanceGroupsRemoveInstancesRequestResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_remove_instances_unary_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(),
+        )
+    client = InstanceGroupsClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_remove_instances") as post, \
+        mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_remove_instances") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.RemoveInstancesInstanceGroupRequest.pb(compute.RemoveInstancesInstanceGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.RemoveInstancesInstanceGroupRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.remove_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_remove_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveInstancesInstanceGroupRequest):
+    client = InstanceGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'}
+    request_init["instance_groups_remove_instances_request_resource"] = {'instances': [{'instance': 'instance_value'}]}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_instances_unary(request) + + +def test_remove_instances_unary_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_remove_instances_request_resource=compute.InstanceGroupsRemoveInstancesRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/removeInstances" % client.transport._host, args[1]) + + +def test_remove_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_instances_unary( + compute.RemoveInstancesInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_remove_instances_request_resource=compute.InstanceGroupsRemoveInstancesRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + + +def test_remove_instances_unary_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetNamedPortsInstanceGroupRequest, + dict, +]) +def test_set_named_ports_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_set_named_ports_request_resource"] = {'fingerprint': 'fingerprint_value', 'named_ports': [{'name': 'name_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_named_ports(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_named_ports_rest_required_fields(request_type=compute.SetNamedPortsInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_named_ports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_named_ports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_named_ports(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_named_ports_rest_unset_required_fields():
+    # NOTE: credentials must be an instance, not the class itself.
+    transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_named_ports._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "instanceGroupsSetNamedPortsRequestResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_named_ports_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(),
+        )
+    client = InstanceGroupsClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_set_named_ports") as post, \
+        mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_set_named_ports") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetNamedPortsInstanceGroupRequest.pb(compute.SetNamedPortsInstanceGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetNamedPortsInstanceGroupRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_named_ports(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_named_ports_rest_bad_request(transport: str = 'rest', request_type=compute.SetNamedPortsInstanceGroupRequest):
+    client = InstanceGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'}
+    request_init["instance_groups_set_named_ports_request_resource"] = {'fingerprint': 'fingerprint_value', 'named_ports': [{'name': 'name_value', 'port': 453}]}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_named_ports(request)
+
+
+def test_set_named_ports_rest_flattened():
+    client = InstanceGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_set_named_ports_request_resource=compute.InstanceGroupsSetNamedPortsRequest(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_named_ports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/setNamedPorts" % client.transport._host, args[1]) + + +def test_set_named_ports_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_named_ports( + compute.SetNamedPortsInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_set_named_ports_request_resource=compute.InstanceGroupsSetNamedPortsRequest(fingerprint='fingerprint_value'), + ) + + +def test_set_named_ports_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetNamedPortsInstanceGroupRequest, + dict, +]) +def test_set_named_ports_unary_rest(request_type): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + request_init["instance_groups_set_named_ports_request_resource"] = {'fingerprint': 'fingerprint_value', 'named_ports': [{'name': 'name_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_named_ports_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_named_ports_unary_rest_required_fields(request_type=compute.SetNamedPortsInstanceGroupRequest): + transport_class = transports.InstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_named_ports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_named_ports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_named_ports_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_named_ports_unary_rest_unset_required_fields():
+    # NOTE: credentials must be an instance, not the class itself.
+    transport = transports.InstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_named_ports._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "instanceGroupsSetNamedPortsRequestResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_named_ports_unary_rest_interceptors(null_interceptor):
+    transport = transports.InstanceGroupsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceGroupsRestInterceptor(),
+        )
+    client = InstanceGroupsClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstanceGroupsRestInterceptor, "post_set_named_ports") as post, \
+        mock.patch.object(transports.InstanceGroupsRestInterceptor, "pre_set_named_ports") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetNamedPortsInstanceGroupRequest.pb(compute.SetNamedPortsInstanceGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetNamedPortsInstanceGroupRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_named_ports_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_named_ports_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetNamedPortsInstanceGroupRequest):
+    client = InstanceGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'}
+    request_init["instance_groups_set_named_ports_request_resource"] = {'fingerprint': 'fingerprint_value', 'named_ports': [{'name': 'name_value', 'port': 453}]}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_named_ports_unary(request) + + +def test_set_named_ports_unary_rest_flattened(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_set_named_ports_request_resource=compute.InstanceGroupsSetNamedPortsRequest(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_named_ports_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/setNamedPorts" % client.transport._host, args[1]) + + +def test_set_named_ports_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_named_ports_unary( + compute.SetNamedPortsInstanceGroupRequest(), + project='project_value', + zone='zone_value', + instance_group='instance_group_value', + instance_groups_set_named_ports_request_resource=compute.InstanceGroupsSetNamedPortsRequest(fingerprint='fingerprint_value'), + ) + + +def test_set_named_ports_unary_rest_error(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceGroupsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceGroupsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InstanceGroupsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.InstanceGroupsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = InstanceGroupsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_instance_groups_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstanceGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_instance_groups_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.instance_groups.transports.InstanceGroupsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InstanceGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_instances', + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'list_instances', + 'remove_instances', + 'set_named_ports', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instance_groups_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.instance_groups.transports.InstanceGroupsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceGroupsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_instance_groups_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.instance_groups.transports.InstanceGroupsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceGroupsTransport() + adc.assert_called_once() + + +def test_instance_groups_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstanceGroupsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_instance_groups_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.InstanceGroupsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_groups_host_no_port(transport_name): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_groups_host_with_port(transport_name): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_groups_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = InstanceGroupsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InstanceGroupsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_instances._session + session2 = client2.transport.add_instances._session + assert session1 != session2 + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.remove_instances._session + session2 = client2.transport.remove_instances._session + assert session1 != session2 + session1 = client1.transport.set_named_ports._session + session2 = client2.transport.set_named_ports._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = InstanceGroupsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = InstanceGroupsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceGroupsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = InstanceGroupsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InstanceGroupsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceGroupsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InstanceGroupsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InstanceGroupsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceGroupsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = InstanceGroupsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InstanceGroupsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceGroupsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InstanceGroupsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InstanceGroupsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceGroupsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InstanceGroupsTransport, '_prep_wrapped_messages') as prep: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InstanceGroupsTransport, '_prep_wrapped_messages') as prep: + transport_class = InstanceGroupsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls 
underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InstanceGroupsClient, transports.InstanceGroupsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_templates.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_templates.py new file mode 100644 index 000000000..262c35df5 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -0,0 +1,3528 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.instance_templates import InstanceTemplatesClient +from google.cloud.compute_v1.services.instance_templates import pagers +from google.cloud.compute_v1.services.instance_templates import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InstanceTemplatesClient._get_default_mtls_endpoint(None) is None + assert InstanceTemplatesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InstanceTemplatesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InstanceTemplatesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InstanceTemplatesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InstanceTemplatesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstanceTemplatesClient, "rest"), +]) +def test_instance_templates_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.InstanceTemplatesRestTransport, "rest"), +]) +def test_instance_templates_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstanceTemplatesClient, "rest"), +]) +def test_instance_templates_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_instance_templates_client_get_transport_class(): + transport = InstanceTemplatesClient.get_transport_class() + available_transports = [ + transports.InstanceTemplatesRestTransport, + ] + assert transport in available_transports + + transport = InstanceTemplatesClient.get_transport_class("rest") + assert transport == transports.InstanceTemplatesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstanceTemplatesClient, transports.InstanceTemplatesRestTransport, "rest"), 
+]) +@mock.patch.object(InstanceTemplatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceTemplatesClient)) +def test_instance_templates_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InstanceTemplatesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InstanceTemplatesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (InstanceTemplatesClient, transports.InstanceTemplatesRestTransport, "rest", "true"), + (InstanceTemplatesClient, transports.InstanceTemplatesRestTransport, "rest", "false"), +]) +@mock.patch.object(InstanceTemplatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceTemplatesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instance_templates_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InstanceTemplatesClient +]) +@mock.patch.object(InstanceTemplatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceTemplatesClient)) +def test_instance_templates_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstanceTemplatesClient, transports.InstanceTemplatesRestTransport, "rest"), +]) +def test_instance_templates_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InstanceTemplatesClient, transports.InstanceTemplatesRestTransport, "rest", None), +]) +def test_instance_templates_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListInstanceTemplatesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceTemplateAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListInstanceTemplatesRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path 
parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceTemplateAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListInstanceTemplatesRequest.pb(compute.AggregatedListInstanceTemplatesRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceTemplateAggregatedList.to_json(compute.InstanceTemplateAggregatedList()) + + request = compute.AggregatedListInstanceTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceTemplateAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListInstanceTemplatesRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceTemplateAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/instanceTemplates" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListInstanceTemplatesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceTemplateAggregatedList( + items={ + 'a':compute.InstanceTemplatesScopedList(), + 'b':compute.InstanceTemplatesScopedList(), + 'c':compute.InstanceTemplatesScopedList(), + }, + next_page_token='abc', + ), + compute.InstanceTemplateAggregatedList( + items={}, + next_page_token='def', + ), + compute.InstanceTemplateAggregatedList( + items={ + 'g':compute.InstanceTemplatesScopedList(), + }, + next_page_token='ghi', + ), + compute.InstanceTemplateAggregatedList( + items={ + 'h':compute.InstanceTemplatesScopedList(), + 'i':compute.InstanceTemplatesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceTemplateAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.InstanceTemplatesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.InstanceTemplatesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.InstanceTemplatesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstanceTemplateRequest, + 
dict, +]) +def test_delete_rest(request_type): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'instance_template': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteInstanceTemplateRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["instance_template"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceTemplate"] = 
'instance_template_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceTemplate" in jsonified_request + assert jsonified_request["instanceTemplate"] == 'instance_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceTemplate", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInstanceTemplateRequest.pb(compute.DeleteInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstanceTemplateRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'instance_template': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'instance_template': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + instance_template='instance_template_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteInstanceTemplateRequest(), + project='project_value', + instance_template='instance_template_value', + ) + + +def test_delete_rest_error(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstanceTemplateRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'instance_template': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteInstanceTemplateRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["instance_template"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceTemplate"] = 'instance_template_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceTemplate" in jsonified_request + assert jsonified_request["instanceTemplate"] == 'instance_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceTemplate", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") 
as transcode, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInstanceTemplateRequest.pb(compute.DeleteInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstanceTemplateRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'instance_template': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'instance_template': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + instance_template='instance_template_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteInstanceTemplateRequest(), + project='project_value', + instance_template='instance_template_value', + ) + + +def test_delete_unary_rest_error(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetInstanceTemplateRequest, + dict, +]) +def test_get_rest(request_type): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'instance_template': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceTemplate( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + source_instance='source_instance_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InstanceTemplate) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.source_instance == 'source_instance_value' + + +def test_get_rest_required_fields(request_type=compute.GetInstanceTemplateRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["instance_template"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceTemplate"] = 'instance_template_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceTemplate" in jsonified_request + assert jsonified_request["instanceTemplate"] == 'instance_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the 
returned response. + return_value = compute.InstanceTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceTemplate", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, 
\ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInstanceTemplateRequest.pb(compute.GetInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceTemplate.to_json(compute.InstanceTemplate()) + + request = compute.GetInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceTemplate() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetInstanceTemplateRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'instance_template': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
    # (continuation of test_get_rest_flattened — def is in the preceding lines)
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceTemplate()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'instance_template': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            instance_template='instance_template_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.InstanceTemplate.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        # The URI actually sent must match the HTTP rule for this method.
        assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}" % client.transport._host, args[1])


def test_get_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = InstanceTemplatesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get(
            compute.GetInstanceTemplateRequest(),
            project='project_value',
            instance_template='instance_template_value',
        )


def test_get_rest_error():
    """Smoke test: a client with the REST transport can be constructed."""
    client = InstanceTemplatesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.GetIamPolicyInstanceTemplateRequest,
    dict,
])
def test_get_iam_policy_rest(request_type):
    """get_iam_policy() over REST parses the mocked response into a Policy."""
    client = InstanceTemplatesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'resource': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Policy(
            etag='etag_value',
            iam_owned=True,
            version=774,
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Policy.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get_iam_policy(request)

    # Establish that the response is the type that we expect.
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyInstanceTemplateRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, 
"post_get_iam_policy") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyInstanceTemplateRequest.pb(compute.GetIamPolicyInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyInstanceTemplateRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    # (continuation of test_get_iam_policy_rest_flattened_error)
    with pytest.raises(ValueError):
        client.get_iam_policy(
            compute.GetIamPolicyInstanceTemplateRequest(),
            project='project_value',
            resource='resource_value',
        )


def test_get_iam_policy_rest_error():
    """Smoke test: a client with the REST transport can be constructed."""
    client = InstanceTemplatesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertInstanceTemplateRequest,
    dict,
])
def test_insert_rest(request_type):
    """insert() over REST returns the long-running Operation as an ExtendedOperation."""
    client = InstanceTemplatesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    # Exhaustive body fixture: every InstanceTemplate field set to a sentinel value.
    request_init["instance_template_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'region': 'region_value', 'self_link': 'self_link_value', 'source_instance': 'source_instance_value', 'source_instance_params': {'disk_configs': [{'auto_delete': True, 'custom_image': 'custom_image_value', 'device_name': 'device_name_value', 'instantiate_from': 'instantiate_from_value'}]}}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert(request)

    # Establish that the response is the type that we expect.
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertInstanceTemplateRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceTemplateResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInstanceTemplateRequest.pb(compute.InsertInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInstanceTemplateRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["instance_template_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 
'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 
'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'region': 'region_value', 'self_link': 'self_link_value', 'source_instance': 'source_instance_value', 'source_instance_params': {'disk_configs': [{'auto_delete': True, 'custom_image': 'custom_image_value', 'device_name': 'device_name_value', 'instantiate_from': 'instantiate_from_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + instance_template_resource=compute.InstanceTemplate(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertInstanceTemplateRequest(), + project='project_value', + instance_template_resource=compute.InstanceTemplate(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInstanceTemplateRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["instance_template_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 
'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 
'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'region': 'region_value', 'self_link': 'self_link_value', 'source_instance': 'source_instance_value', 'source_instance_params': {'disk_configs': [{'auto_delete': True, 'custom_image': 'custom_image_value', 'device_name': 'device_name_value', 'instantiate_from': 'instantiate_from_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertInstanceTemplateRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceTemplateResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertInstanceTemplateRequest.pb(compute.InsertInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInstanceTemplateRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["instance_template_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 
'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 
'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'region': 'region_value', 'self_link': 'self_link_value', 'source_instance': 'source_instance_value', 'source_instance_params': {'disk_configs': [{'auto_delete': True, 'custom_image': 'custom_image_value', 'device_name': 'device_name_value', 'instantiate_from': 'instantiate_from_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + instance_template_resource=compute.InstanceTemplate(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertInstanceTemplateRequest(), + project='project_value', + instance_template_resource=compute.InstanceTemplate(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListInstanceTemplatesRequest, + dict, +]) +def test_list_rest(request_type): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceTemplateList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListInstanceTemplatesRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceTemplateList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListInstanceTemplatesRequest.pb(compute.ListInstanceTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceTemplateList.to_json(compute.InstanceTemplateList()) + + request = compute.ListInstanceTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceTemplateList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListInstanceTemplatesRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceTemplateList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListInstanceTemplatesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceTemplateList( + items=[ + compute.InstanceTemplate(), + compute.InstanceTemplate(), + compute.InstanceTemplate(), + ], + next_page_token='abc', + ), + compute.InstanceTemplateList( + items=[], + next_page_token='def', + ), + compute.InstanceTemplateList( + items=[ + compute.InstanceTemplate(), + ], + next_page_token='ghi', + ), + compute.InstanceTemplateList( + items=[ + compute.InstanceTemplate(), + compute.InstanceTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceTemplateList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceTemplate) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyInstanceTemplateRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 
'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyInstanceTemplateRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetPolicyRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyInstanceTemplateRequest.pb(compute.SetIamPolicyInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyInstanceTemplateRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 
'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyInstanceTemplateRequest(), + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsInstanceTemplateRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsInstanceTemplateRequest): + transport_class = transports.InstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.InstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceTemplatesRestInterceptor(), + ) + client = InstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.InstanceTemplatesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsInstanceTemplateRequest.pb(compute.TestIamPermissionsInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsInstanceTemplateRequest): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/instanceTemplates/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsInstanceTemplateRequest(), + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceTemplatesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceTemplatesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceTemplatesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceTemplatesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InstanceTemplatesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.InstanceTemplatesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = InstanceTemplatesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_instance_templates_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstanceTemplatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_instance_templates_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.instance_templates.transports.InstanceTemplatesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InstanceTemplatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instance_templates_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.instance_templates.transports.InstanceTemplatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceTemplatesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_instance_templates_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.instance_templates.transports.InstanceTemplatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceTemplatesTransport() + adc.assert_called_once() + + +def test_instance_templates_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstanceTemplatesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_instance_templates_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.InstanceTemplatesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_templates_host_no_port(transport_name): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_templates_host_with_port(transport_name): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_templates_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InstanceTemplatesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InstanceTemplatesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = InstanceTemplatesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = InstanceTemplatesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceTemplatesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = InstanceTemplatesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InstanceTemplatesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceTemplatesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InstanceTemplatesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InstanceTemplatesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceTemplatesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = InstanceTemplatesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InstanceTemplatesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceTemplatesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InstanceTemplatesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InstanceTemplatesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceTemplatesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InstanceTemplatesTransport, '_prep_wrapped_messages') as prep: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InstanceTemplatesTransport, '_prep_wrapped_messages') as prep: + transport_class = InstanceTemplatesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InstanceTemplatesClient, transports.InstanceTemplatesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instances.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instances.py new file mode 100644 index 000000000..15dc56bab --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_instances.py @@ -0,0 +1,23156 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.instances import InstancesClient +from google.cloud.compute_v1.services.instances import pagers +from google.cloud.compute_v1.services.instances import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InstancesClient._get_default_mtls_endpoint(None) is None + assert InstancesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InstancesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InstancesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InstancesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InstancesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstancesClient, "rest"), +]) +def test_instances_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.InstancesRestTransport, "rest"), +]) +def test_instances_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds 
= service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstancesClient, "rest"), +]) +def test_instances_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_instances_client_get_transport_class(): + transport = InstancesClient.get_transport_class() + available_transports = [ + transports.InstancesRestTransport, + ] + assert transport in available_transports + + transport = InstancesClient.get_transport_class("rest") + assert transport == transports.InstancesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstancesClient, transports.InstancesRestTransport, "rest"), +]) +@mock.patch.object(InstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstancesClient)) +def test_instances_client_client_options(client_class, transport_class, 
transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InstancesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InstancesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (InstancesClient, transports.InstancesRestTransport, "rest", "true"), + (InstancesClient, transports.InstancesRestTransport, "rest", "false"), +]) +@mock.patch.object(InstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstancesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instances_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InstancesClient +]) +@mock.patch.object(InstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstancesClient)) +def test_instances_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstancesClient, transports.InstancesRestTransport, "rest"), +]) +def test_instances_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InstancesClient, transports.InstancesRestTransport, "rest", None), +]) +def test_instances_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddAccessConfigInstanceRequest, + dict, +]) +def test_add_access_config_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["access_config_resource"] = {'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 
'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_access_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_access_config_rest_required_fields(request_type=compute.AddAccessConfigInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["network_interface"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "networkInterface" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_access_config._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == request_init["network_interface"] + + jsonified_request["instance"] = 'instance_value' + jsonified_request["networkInterface"] = 'network_interface_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_access_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("network_interface", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_access_config(request) + + expected_params = [ + ( + "networkInterface", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_access_config_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_access_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("networkInterface", "requestId", )) & set(("accessConfigResource", "instance", "networkInterface", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_access_config_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_add_access_config") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_add_access_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddAccessConfigInstanceRequest.pb(compute.AddAccessConfigInstanceRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddAccessConfigInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_access_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_access_config_rest_bad_request(transport: str = 'rest', request_type=compute.AddAccessConfigInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["access_config_resource"] = {'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_access_config(request) + + +def test_add_access_config_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + access_config_resource=compute.AccessConfig(external_ipv6='external_ipv6_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_access_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig" % client.transport._host, args[1]) + + +def test_add_access_config_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_access_config( + compute.AddAccessConfigInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + access_config_resource=compute.AccessConfig(external_ipv6='external_ipv6_value'), + ) + + +def test_add_access_config_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddAccessConfigInstanceRequest, + dict, +]) +def test_add_access_config_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["access_config_resource"] = {'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_access_config_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_access_config_unary_rest_required_fields(request_type=compute.AddAccessConfigInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["network_interface"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "networkInterface" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_access_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == request_init["network_interface"] + + jsonified_request["instance"] = 'instance_value' + jsonified_request["networkInterface"] = 'network_interface_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_access_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("network_interface", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_access_config_unary(request) + + expected_params = [ + ( + "networkInterface", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_access_config_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_access_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("networkInterface", "requestId", )) & set(("accessConfigResource", "instance", "networkInterface", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_access_config_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_add_access_config") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_add_access_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddAccessConfigInstanceRequest.pb(compute.AddAccessConfigInstanceRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddAccessConfigInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_access_config_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_access_config_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddAccessConfigInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["access_config_resource"] = {'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_access_config_unary(request) + + +def test_add_access_config_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + access_config_resource=compute.AccessConfig(external_ipv6='external_ipv6_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_access_config_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig" % client.transport._host, args[1]) + + +def test_add_access_config_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_access_config_unary( + compute.AddAccessConfigInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + access_config_resource=compute.AccessConfig(external_ipv6='external_ipv6_value'), + ) + + +def test_add_access_config_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddResourcePoliciesInstanceRequest, + dict, +]) +def test_add_resource_policies_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_resource_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_resource_policies_rest_required_fields(request_type=compute.AddResourcePoliciesInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_resource_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_resource_policies_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesAddResourcePoliciesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_resource_policies_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_add_resource_policies") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_add_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddResourcePoliciesInstanceRequest.pb(compute.AddResourcePoliciesInstanceRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddResourcePoliciesInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_resource_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_resource_policies_rest_bad_request(transport: str = 'rest', request_type=compute.AddResourcePoliciesInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies(request) + + +def test_add_resource_policies_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_resource_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addResourcePolicies" % client.transport._host, args[1]) + + +def test_add_resource_policies_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_resource_policies( + compute.AddResourcePoliciesInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_add_resource_policies_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddResourcePoliciesInstanceRequest, + dict, +]) +def test_add_resource_policies_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_resource_policies_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_resource_policies_unary_rest_required_fields(request_type=compute.AddResourcePoliciesInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_resource_policies_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_resource_policies_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesAddResourcePoliciesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_resource_policies_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_add_resource_policies") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_add_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddResourcePoliciesInstanceRequest.pb(compute.AddResourcePoliciesInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddResourcePoliciesInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_resource_policies_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_resource_policies_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddResourcePoliciesInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies_unary(request) + + +def test_add_resource_policies_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_resource_policies_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addResourcePolicies" % client.transport._host, args[1]) + + +def test_add_resource_policies_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_resource_policies_unary( + compute.AddResourcePoliciesInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_add_resource_policies_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListInstancesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListInstancesRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListInstancesRequest.pb(compute.AggregatedListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceAggregatedList.to_json(compute.InstanceAggregatedList()) + + request = compute.AggregatedListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListInstancesRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/instances" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListInstancesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceAggregatedList( + items={ + 'a':compute.InstancesScopedList(), + 'b':compute.InstancesScopedList(), + 'c':compute.InstancesScopedList(), + }, + next_page_token='abc', + ), + compute.InstanceAggregatedList( + items={}, + next_page_token='def', + ), + compute.InstanceAggregatedList( + items={ + 'g':compute.InstancesScopedList(), + }, + next_page_token='ghi', + ), + compute.InstanceAggregatedList( + items={ + 'h':compute.InstancesScopedList(), + 'i':compute.InstancesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.InstancesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.InstancesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.InstancesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.AttachDiskInstanceRequest, + dict, +]) +def test_attach_disk_rest(request_type): + client = InstancesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["attached_disk_resource"] = {'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.attach_disk(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_attach_disk_rest_required_fields(request_type=compute.AttachDiskInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_disk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 
'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_disk._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force_attach", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.attach_disk(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_attach_disk_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.attach_disk._get_unset_required_fields({}) + assert set(unset_fields) == (set(("forceAttach", "requestId", )) & set(("attachedDiskResource", "instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_attach_disk_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_attach_disk") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_attach_disk") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AttachDiskInstanceRequest.pb(compute.AttachDiskInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AttachDiskInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.attach_disk(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_attach_disk_rest_bad_request(transport: str = 'rest', request_type=compute.AttachDiskInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["attached_disk_resource"] = {'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 
'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_disk(request) + + +def test_attach_disk_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + attached_disk_resource=compute.AttachedDisk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.attach_disk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/attachDisk" % client.transport._host, args[1]) + + +def test_attach_disk_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.attach_disk( + compute.AttachDiskInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + attached_disk_resource=compute.AttachedDisk(architecture='architecture_value'), + ) + + +def test_attach_disk_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AttachDiskInstanceRequest, + dict, +]) +def test_attach_disk_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["attached_disk_resource"] = {'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 
'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.attach_disk_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_attach_disk_unary_rest_required_fields(request_type=compute.AttachDiskInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_disk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_disk._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force_attach", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.attach_disk_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_attach_disk_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.attach_disk._get_unset_required_fields({}) + assert set(unset_fields) == (set(("forceAttach", "requestId", )) & set(("attachedDiskResource", "instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_attach_disk_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_attach_disk") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_attach_disk") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AttachDiskInstanceRequest.pb(compute.AttachDiskInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AttachDiskInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.attach_disk_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_attach_disk_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AttachDiskInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["attached_disk_resource"] = {'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 
'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_disk_unary(request) + + +def test_attach_disk_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + attached_disk_resource=compute.AttachedDisk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.attach_disk_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/attachDisk" % client.transport._host, args[1]) + + +def test_attach_disk_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.attach_disk_unary( + compute.AttachDiskInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + attached_disk_resource=compute.AttachedDisk(architecture='architecture_value'), + ) + + +def test_attach_disk_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.BulkInsertInstanceRequest, + dict, +]) +def test_bulk_insert_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["bulk_insert_instance_resource_resource"] = {'count': 553, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 
'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': 
{'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'location_policy': {'locations': {}, 'target_shape': 'target_shape_value'}, 'min_count': 972, 'name_pattern': 'name_pattern_value', 'per_instance_properties': {}, 'source_instance_template': 'source_instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.bulk_insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_bulk_insert_rest_required_fields(request_type=compute.BulkInsertInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.bulk_insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_bulk_insert_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.bulk_insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("bulkInsertInstanceResourceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_insert_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_bulk_insert") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_bulk_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.BulkInsertInstanceRequest.pb(compute.BulkInsertInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.BulkInsertInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.bulk_insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_bulk_insert_rest_bad_request(transport: str = 'rest', request_type=compute.BulkInsertInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["bulk_insert_instance_resource_resource"] = {'count': 553, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 
'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 
'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'location_policy': {'locations': {}, 'target_shape': 'target_shape_value'}, 'min_count': 972, 'name_pattern': 'name_pattern_value', 'per_instance_properties': {}, 'source_instance_template': 'source_instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.bulk_insert(request) + + +def test_bulk_insert_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(count=553), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.bulk_insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/bulkInsert" % client.transport._host, args[1]) + + +def test_bulk_insert_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.bulk_insert( + compute.BulkInsertInstanceRequest(), + project='project_value', + zone='zone_value', + bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(count=553), + ) + + +def test_bulk_insert_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.BulkInsertInstanceRequest, + dict, +]) +def test_bulk_insert_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["bulk_insert_instance_resource_resource"] = {'count': 553, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 
'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 
'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'location_policy': {'locations': {}, 'target_shape': 'target_shape_value'}, 'min_count': 972, 'name_pattern': 'name_pattern_value', 'per_instance_properties': {}, 'source_instance_template': 'source_instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.bulk_insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_bulk_insert_unary_rest_required_fields(request_type=compute.BulkInsertInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
    # (continuation of test_bulk_insert_unary_rest_required_fields)
    # Issue the call with both the HTTP session and transcode() mocked so the
    # required-field machinery can be exercised without real transcoding.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.bulk_insert_unary(request)

            # bulk_insert has no always-required query params, so none are sent.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_bulk_insert_unary_rest_unset_required_fields():
    """Check which bulk_insert query params stay unset given an empty request."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance
    # (missing "()"); _get_unset_required_fields never exercises credentials,
    # so the test still works -- generated-code quirk, confirm upstream.
    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.bulk_insert._get_unset_required_fields({})
    # Unset fields must be the optional query params intersected with the
    # method's required fields (here: an empty intersection).
    assert set(unset_fields) == (set(("requestId", )) & set(("bulkInsertInstanceResourceResource", "project", "zone", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_bulk_insert_unary_rest_interceptors(null_interceptor):
    """Verify pre/post interceptor hooks fire exactly once around bulk_insert."""
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
        )
    client = InstancesClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.InstancesRestInterceptor, "post_bulk_insert") as post, \
         mock.patch.object(transports.InstancesRestInterceptor, "pre_bulk_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.BulkInsertInstanceRequest.pb(compute.BulkInsertInstanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.BulkInsertInstanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.bulk_insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_bulk_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.BulkInsertInstanceRequest):
    """A 400 response from the transport must surface as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2'}
    # Fully-populated body resource so every field survives transcoding.
    request_init["bulk_insert_instance_resource_resource"] = {'count': 553, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'location_policy': {'locations': {}, 'target_shape': 'target_shape_value'}, 'min_count': 972, 'name_pattern': 'name_pattern_value', 'per_instance_properties': {}, 'source_instance_template': 'source_instance_template_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.bulk_insert_unary(request)


def test_bulk_insert_unary_rest_flattened():
    """Flattened (keyword-argument) call must produce the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(count=553),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.bulk_insert_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        # (continuation of test_bulk_insert_unary_rest_flattened)
        # Exactly one HTTP call, routed to the bulkInsert URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/bulkInsert" % client.transport._host, args[1])


def test_bulk_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.bulk_insert_unary(
            compute.BulkInsertInstanceRequest(),
            project='project_value',
            zone='zone_value',
            bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(count=553),
        )


def test_bulk_insert_unary_rest_error():
    """Smoke test: a REST client can be constructed without error."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.DeleteInstanceRequest,
    dict,
])
def test_delete_rest(request_type):
    """delete() must return an ExtendedOperation mirroring the faked response."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_delete_rest_required_fields(request_type=compute.DeleteInstanceRequest):
    """Exercise required-field validation and defaulting for delete()."""
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["instance"] = 'instance_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == 'instance_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete(request)

            # delete has no always-required query params, so none are sent.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_delete_rest_unset_required_fields():
    """Check which delete query params stay unset given an empty request."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance
    # (missing "()") -- generated-code quirk, harmless here; confirm upstream.
    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.delete._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_rest_interceptors(null_interceptor):
    """Verify pre/post interceptor hooks fire exactly once around delete."""
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
        )
    client = InstancesClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.InstancesRestInterceptor, "post_delete") as post, \
         mock.patch.object(transports.InstancesRestInterceptor, "pre_delete") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.DeleteInstanceRequest.pb(compute.DeleteInstanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        # (continuation of test_delete_rest_interceptors)
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.DeleteInstanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstanceRequest):
    """A 400 response from the transport must surface as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete(request)


def test_delete_rest_flattened():
    """Flattened (keyword-argument) call must produce the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            instance='instance_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.delete(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}" % client.transport._host, args[1])


def test_delete_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete(
            compute.DeleteInstanceRequest(),
            project='project_value',
            zone='zone_value',
            instance='instance_value',
        )


def test_delete_rest_error():
    """Smoke test: a REST client can be constructed without error."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.DeleteInstanceRequest,
    dict,
])
def test_delete_unary_rest(request_type):
    """delete_unary() must return the raw Operation (no extended wrapper)."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_delete_unary_rest_required_fields(request_type=compute.DeleteInstanceRequest):
    """Exercise required-field validation and defaulting for delete_unary()."""
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["instance"] = 'instance_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == 'instance_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    # (continuation of test_delete_unary_rest_required_fields)
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete_unary(request)

            # delete has no always-required query params, so none are sent.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_delete_unary_rest_unset_required_fields():
    """Check which delete query params stay unset given an empty request."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance
    # (missing "()") -- generated-code quirk, harmless here; confirm upstream.
    # delete_unary shares the transport-level `delete` method, hence
    # transport.delete below.
    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.delete._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_unary_rest_interceptors(null_interceptor):
    """Verify pre/post interceptor hooks fire exactly once around delete_unary."""
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
        )
    client = InstancesClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.InstancesRestInterceptor, "post_delete") as post, \
         mock.patch.object(transports.InstancesRestInterceptor, "pre_delete") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.DeleteInstanceRequest.pb(compute.DeleteInstanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.DeleteInstanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstanceRequest):
    """A 400 response from the transport must surface as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete_unary(request)


def test_delete_unary_rest_flattened():
    """Flattened (keyword-argument) call must produce the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            instance='instance_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.delete_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}" % client.transport._host, args[1])


def test_delete_unary_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_delete_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteAccessConfigInstanceRequest, + dict, +]) +def test_delete_access_config_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_access_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_access_config_rest_required_fields(request_type=compute.DeleteAccessConfigInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["access_config"] = "" + request_init["instance"] = "" + request_init["network_interface"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert 
"accessConfig" not in jsonified_request + assert "networkInterface" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_access_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "accessConfig" in jsonified_request + assert jsonified_request["accessConfig"] == request_init["access_config"] + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == request_init["network_interface"] + + jsonified_request["accessConfig"] = 'access_config_value' + jsonified_request["instance"] = 'instance_value' + jsonified_request["networkInterface"] = 'network_interface_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_access_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("access_config", "network_interface", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "accessConfig" in jsonified_request + assert jsonified_request["accessConfig"] == 'access_config_value' + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_access_config(request)
+
+            expected_params = [
+                (
+                    "accessConfig",
+                    "",
+                ),
+                (
+                    "networkInterface",
+                    "",
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_access_config_rest_unset_required_fields():
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_access_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("accessConfig", "networkInterface", "requestId", )) & set(("accessConfig", "instance", "networkInterface", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_access_config_rest_interceptors(null_interceptor):
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstancesRestInterceptor, "post_delete_access_config") as post, \
+        mock.patch.object(transports.InstancesRestInterceptor, "pre_delete_access_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteAccessConfigInstanceRequest.pb(compute.DeleteAccessConfigInstanceRequest())
+
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteAccessConfigInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_access_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_access_config_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteAccessConfigInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_access_config(request) + + +def test_delete_access_config_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + access_config='access_config_value', + network_interface='network_interface_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_access_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig" % client.transport._host, args[1]) + + +def test_delete_access_config_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_access_config( + compute.DeleteAccessConfigInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + access_config='access_config_value', + network_interface='network_interface_value', + ) + + +def test_delete_access_config_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteAccessConfigInstanceRequest, + dict, +]) +def test_delete_access_config_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_access_config_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_access_config_unary_rest_required_fields(request_type=compute.DeleteAccessConfigInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["access_config"] = "" + request_init["instance"] = "" + request_init["network_interface"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "accessConfig" not in jsonified_request + assert "networkInterface" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_access_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "accessConfig" in jsonified_request + assert jsonified_request["accessConfig"] == request_init["access_config"] + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == request_init["network_interface"] + + jsonified_request["accessConfig"] = 'access_config_value' + jsonified_request["instance"] = 'instance_value' + jsonified_request["networkInterface"] = 'network_interface_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_access_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("access_config", "network_interface", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "accessConfig" in jsonified_request + assert jsonified_request["accessConfig"] == 'access_config_value' + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_access_config_unary(request)
+
+            expected_params = [
+                (
+                    "accessConfig",
+                    "",
+                ),
+                (
+                    "networkInterface",
+                    "",
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_access_config_unary_rest_unset_required_fields():
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_access_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("accessConfig", "networkInterface", "requestId", )) & set(("accessConfig", "instance", "networkInterface", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_access_config_unary_rest_interceptors(null_interceptor):
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstancesRestInterceptor, "post_delete_access_config") as post, \
+        mock.patch.object(transports.InstancesRestInterceptor, "pre_delete_access_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteAccessConfigInstanceRequest.pb(compute.DeleteAccessConfigInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.DeleteAccessConfigInstanceRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.delete_access_config_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_access_config_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteAccessConfigInstanceRequest):
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_access_config_unary(request)
+
+
+def test_delete_access_config_unary_rest_flattened():
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + access_config='access_config_value', + network_interface='network_interface_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_access_config_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig" % client.transport._host, args[1]) + + +def test_delete_access_config_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_access_config_unary( + compute.DeleteAccessConfigInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + access_config='access_config_value', + network_interface='network_interface_value', + ) + + +def test_delete_access_config_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DetachDiskInstanceRequest, + dict, +]) +def test_detach_disk_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.detach_disk(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_detach_disk_rest_required_fields(request_type=compute.DetachDiskInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["device_name"] = "" + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "deviceName" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_disk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with default values are now present + assert "deviceName" in jsonified_request + assert jsonified_request["deviceName"] == request_init["device_name"] + + jsonified_request["deviceName"] = 'device_name_value' + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_disk._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("device_name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "deviceName" in jsonified_request + assert jsonified_request["deviceName"] == 'device_name_value' + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.detach_disk(request)
+
+            expected_params = [
+                (
+                    "deviceName",
+                    "",
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_detach_disk_rest_unset_required_fields():
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.detach_disk._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("deviceName", "requestId", )) & set(("deviceName", "instance", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_detach_disk_rest_interceptors(null_interceptor):
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstancesRestInterceptor, "post_detach_disk") as post, \
+        mock.patch.object(transports.InstancesRestInterceptor, "pre_detach_disk") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DetachDiskInstanceRequest.pb(compute.DetachDiskInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DetachDiskInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.detach_disk(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_disk_rest_bad_request(transport: str = 'rest', request_type=compute.DetachDiskInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_disk(request) + + +def test_detach_disk_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + device_name='device_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.detach_disk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/detachDisk" % client.transport._host, args[1]) + + +def test_detach_disk_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.detach_disk( + compute.DetachDiskInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + device_name='device_name_value', + ) + + +def test_detach_disk_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DetachDiskInstanceRequest, + dict, +]) +def test_detach_disk_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.detach_disk_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_detach_disk_unary_rest_required_fields(request_type=compute.DetachDiskInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["device_name"] = "" + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "deviceName" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_disk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deviceName" in jsonified_request + assert jsonified_request["deviceName"] == request_init["device_name"] + + jsonified_request["deviceName"] = 'device_name_value' + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_disk._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("device_name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "deviceName" in jsonified_request + assert jsonified_request["deviceName"] == 'device_name_value' + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.detach_disk_unary(request) + + expected_params = [ + ( + "deviceName", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_detach_disk_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.detach_disk._get_unset_required_fields({}) + assert set(unset_fields) == (set(("deviceName", "requestId", )) & set(("deviceName", "instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_disk_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_detach_disk") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_detach_disk") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DetachDiskInstanceRequest.pb(compute.DetachDiskInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value 
= Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DetachDiskInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.detach_disk_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_disk_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DetachDiskInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_disk_unary(request) + + +def test_detach_disk_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + device_name='device_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.detach_disk_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/detachDisk" % client.transport._host, args[1]) + + +def test_detach_disk_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.detach_disk_unary( + compute.DetachDiskInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + device_name='device_name_value', + ) + + +def test_detach_disk_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetInstanceRequest, + dict, +]) +def test_get_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Instance( + can_ip_forward=True, + cpu_platform='cpu_platform_value', + creation_timestamp='creation_timestamp_value', + deletion_protection=True, + description='description_value', + fingerprint='fingerprint_value', + hostname='hostname_value', + id=205, + key_revocation_action_type='key_revocation_action_type_value', + kind='kind_value', + label_fingerprint='label_fingerprint_value', + last_start_timestamp='last_start_timestamp_value', + last_stop_timestamp='last_stop_timestamp_value', + last_suspended_timestamp='last_suspended_timestamp_value', + machine_type='machine_type_value', + min_cpu_platform='min_cpu_platform_value', + name='name_value', + private_ipv6_google_access='private_ipv6_google_access_value', + resource_policies=['resource_policies_value'], + satisfies_pzs=True, + self_link='self_link_value', + source_machine_image='source_machine_image_value', + start_restricted=True, + status='status_value', + status_message='status_message_value', + zone='zone_value', + ) 
+ + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Instance) + assert response.can_ip_forward is True + assert response.cpu_platform == 'cpu_platform_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.deletion_protection is True + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.hostname == 'hostname_value' + assert response.id == 205 + assert response.key_revocation_action_type == 'key_revocation_action_type_value' + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.last_start_timestamp == 'last_start_timestamp_value' + assert response.last_stop_timestamp == 'last_stop_timestamp_value' + assert response.last_suspended_timestamp == 'last_suspended_timestamp_value' + assert response.machine_type == 'machine_type_value' + assert response.min_cpu_platform == 'min_cpu_platform_value' + assert response.name == 'name_value' + assert response.private_ipv6_google_access == 'private_ipv6_google_access_value' + assert response.resource_policies == ['resource_policies_value'] + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.source_machine_image == 'source_machine_image_value' + assert response.start_restricted is True + assert response.status == 'status_value' + assert response.status_message == 'status_message_value' + assert response.zone == 'zone_value' + + +def 
test_get_rest_required_fields(request_type=compute.GetInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInstanceRequest.pb(compute.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Instance.to_json(compute.Instance()) + + request = compute.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Instance() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_get_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetEffectiveFirewallsInstanceRequest, + dict, +]) +def test_get_effective_firewalls_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstancesGetEffectiveFirewallsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstancesGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_effective_firewalls(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InstancesGetEffectiveFirewallsResponse) + + +def test_get_effective_firewalls_rest_required_fields(request_type=compute.GetEffectiveFirewallsInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["network_interface"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "networkInterface" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_effective_firewalls._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == request_init["network_interface"] + + jsonified_request["instance"] = 'instance_value' + jsonified_request["networkInterface"] = 'network_interface_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_effective_firewalls._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("network_interface", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstancesGetEffectiveFirewallsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstancesGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_effective_firewalls(request) + + expected_params = [ + ( + "networkInterface", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_effective_firewalls_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_effective_firewalls._get_unset_required_fields({}) + assert set(unset_fields) == (set(("networkInterface", )) & set(("instance", "networkInterface", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_effective_firewalls_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_get_effective_firewalls") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_get_effective_firewalls") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetEffectiveFirewallsInstanceRequest.pb(compute.GetEffectiveFirewallsInstanceRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstancesGetEffectiveFirewallsResponse.to_json(compute.InstancesGetEffectiveFirewallsResponse()) + + request = compute.GetEffectiveFirewallsInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstancesGetEffectiveFirewallsResponse() + + client.get_effective_firewalls(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_effective_firewalls_rest_bad_request(transport: str = 'rest', request_type=compute.GetEffectiveFirewallsInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_effective_firewalls(request) + + +def test_get_effective_firewalls_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstancesGetEffectiveFirewallsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstancesGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_effective_firewalls(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getEffectiveFirewalls" % client.transport._host, args[1]) + + +def test_get_effective_firewalls_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_effective_firewalls( + compute.GetEffectiveFirewallsInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + ) + + +def test_get_effective_firewalls_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetGuestAttributesInstanceRequest, + dict, +]) +def test_get_guest_attributes_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.GuestAttributes( + kind='kind_value', + query_path='query_path_value', + self_link='self_link_value', + variable_key='variable_key_value', + variable_value='variable_value_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.GuestAttributes.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_guest_attributes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.GuestAttributes) + assert response.kind == 'kind_value' + assert response.query_path == 'query_path_value' + assert response.self_link == 'self_link_value' + assert response.variable_key == 'variable_key_value' + assert response.variable_value == 'variable_value_value' + + +def test_get_guest_attributes_rest_required_fields(request_type=compute.GetGuestAttributesInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_guest_attributes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_guest_attributes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("query_path", "variable_key", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.GuestAttributes() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.GuestAttributes.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_guest_attributes(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_guest_attributes_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_guest_attributes._get_unset_required_fields({}) + assert set(unset_fields) == (set(("queryPath", "variableKey", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_guest_attributes_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_get_guest_attributes") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_get_guest_attributes") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetGuestAttributesInstanceRequest.pb(compute.GetGuestAttributesInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.GuestAttributes.to_json(compute.GuestAttributes()) + + request = compute.GetGuestAttributesInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.GuestAttributes() + + client.get_guest_attributes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_guest_attributes_rest_bad_request(transport: str = 'rest', request_type=compute.GetGuestAttributesInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_guest_attributes(request) + + +def test_get_guest_attributes_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.GuestAttributes() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.GuestAttributes.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_guest_attributes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getGuestAttributes" % client.transport._host, args[1]) + + +def test_get_guest_attributes_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_guest_attributes( + compute.GetGuestAttributesInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_get_guest_attributes_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyInstanceRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyInstanceRequest.pb(compute.GetIamPolicyInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyInstanceRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetScreenshotInstanceRequest, + dict, +]) +def test_get_screenshot_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Screenshot( + contents='contents_value', + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Screenshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_screenshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Screenshot) + assert response.contents == 'contents_value' + assert response.kind == 'kind_value' + + +def test_get_screenshot_rest_required_fields(request_type=compute.GetScreenshotInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_screenshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_screenshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Screenshot() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Screenshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_screenshot(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_screenshot_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_screenshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_screenshot_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_get_screenshot") as post, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "pre_get_screenshot") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetScreenshotInstanceRequest.pb(compute.GetScreenshotInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Screenshot.to_json(compute.Screenshot()) + + request = compute.GetScreenshotInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Screenshot() + + client.get_screenshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_screenshot_rest_bad_request(transport: str = 'rest', request_type=compute.GetScreenshotInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_screenshot(request) + + +def test_get_screenshot_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Screenshot() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Screenshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_screenshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/screenshot" % client.transport._host, args[1]) + + +def test_get_screenshot_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_screenshot( + compute.GetScreenshotInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_get_screenshot_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetSerialPortOutputInstanceRequest, + dict, +]) +def test_get_serial_port_output_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SerialPortOutput( + contents='contents_value', + kind='kind_value', + next_=542, + self_link='self_link_value', + start=558, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SerialPortOutput.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_serial_port_output(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SerialPortOutput) + assert response.contents == 'contents_value' + assert response.kind == 'kind_value' + assert response.next_ == 542 + assert response.self_link == 'self_link_value' + assert response.start == 558 + + +def test_get_serial_port_output_rest_required_fields(request_type=compute.GetSerialPortOutputInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_serial_port_output._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_serial_port_output._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("port", "start", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SerialPortOutput() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SerialPortOutput.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_serial_port_output(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_serial_port_output_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_serial_port_output._get_unset_required_fields({}) + assert set(unset_fields) == (set(("port", "start", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_serial_port_output_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_get_serial_port_output") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_get_serial_port_output") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetSerialPortOutputInstanceRequest.pb(compute.GetSerialPortOutputInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SerialPortOutput.to_json(compute.SerialPortOutput()) + + request = compute.GetSerialPortOutputInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SerialPortOutput() + + client.get_serial_port_output(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_serial_port_output_rest_bad_request(transport: str = 'rest', request_type=compute.GetSerialPortOutputInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_serial_port_output(request) + + +def test_get_serial_port_output_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SerialPortOutput() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SerialPortOutput.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_serial_port_output(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/serialPort" % client.transport._host, args[1]) + + +def test_get_serial_port_output_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_serial_port_output( + compute.GetSerialPortOutputInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_get_serial_port_output_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetShieldedInstanceIdentityInstanceRequest, + dict, +]) +def test_get_shielded_instance_identity_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ShieldedInstanceIdentity( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ShieldedInstanceIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_shielded_instance_identity(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.ShieldedInstanceIdentity) + assert response.kind == 'kind_value' + + +def test_get_shielded_instance_identity_rest_required_fields(request_type=compute.GetShieldedInstanceIdentityInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_shielded_instance_identity._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_shielded_instance_identity._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ShieldedInstanceIdentity() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ShieldedInstanceIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_shielded_instance_identity(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_shielded_instance_identity_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_shielded_instance_identity._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_shielded_instance_identity_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_get_shielded_instance_identity") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_get_shielded_instance_identity") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetShieldedInstanceIdentityInstanceRequest.pb(compute.GetShieldedInstanceIdentityInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ShieldedInstanceIdentity.to_json(compute.ShieldedInstanceIdentity()) + + request = compute.GetShieldedInstanceIdentityInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ShieldedInstanceIdentity() + + client.get_shielded_instance_identity(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_shielded_instance_identity_rest_bad_request(transport: str = 'rest', request_type=compute.GetShieldedInstanceIdentityInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_shielded_instance_identity(request) + + +def test_get_shielded_instance_identity_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ShieldedInstanceIdentity() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ShieldedInstanceIdentity.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_shielded_instance_identity(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getShieldedInstanceIdentity" % client.transport._host, args[1]) + + +def test_get_shielded_instance_identity_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_shielded_instance_identity( + compute.GetShieldedInstanceIdentityInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_get_shielded_instance_identity_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInstanceRequest, + dict, +]) +def test_insert_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_resource"] = {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'cpu_platform': 'cpu_platform_value', 'creation_timestamp': 'creation_timestamp_value', 'deletion_protection': True, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 
'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'display_device': {'enable_display': True}, 'fingerprint': 'fingerprint_value', 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'hostname': 'hostname_value', 'id': 205, 'instance_encryption_key': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_start_timestamp': 'last_start_timestamp_value', 'last_stop_timestamp': 'last_stop_timestamp_value', 'last_suspended_timestamp': 'last_suspended_timestamp_value', 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'name': 
'name_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'params': {'resource_manager_tags': {}}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'physical_host': 'physical_host_value'}, 'satisfies_pzs': True, 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'self_link': 'self_link_value', 'service_accounts': [{'email': 
'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'shielded_instance_integrity_policy': {'update_auto_learn_policy': True}, 'source_machine_image': 'source_machine_image_value', 'source_machine_image_encryption_key': {}, 'start_restricted': True, 'status': 'status_value', 'status_message': 'status_message_value', 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}, 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 
'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_instance_template", "source_machine_image", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceInstanceTemplate", "sourceMachineImage", )) & set(("instanceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInstanceRequest.pb(compute.InsertInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_resource"] = {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'cpu_platform': 'cpu_platform_value', 'creation_timestamp': 'creation_timestamp_value', 'deletion_protection': True, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 
'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'display_device': {'enable_display': True}, 'fingerprint': 'fingerprint_value', 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'hostname': 'hostname_value', 'id': 205, 'instance_encryption_key': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_start_timestamp': 'last_start_timestamp_value', 'last_stop_timestamp': 'last_stop_timestamp_value', 'last_suspended_timestamp': 'last_suspended_timestamp_value', 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'name': 'name_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 
'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'params': {'resource_manager_tags': {}}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'physical_host': 'physical_host_value'}, 'satisfies_pzs': True, 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'self_link': 'self_link_value', 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'shielded_instance_integrity_policy': {'update_auto_learn_policy': True}, 'source_machine_image': 'source_machine_image_value', 'source_machine_image_encryption_key': {}, 'start_restricted': True, 'status': 'status_value', 'status_message': 
'status_message_value', 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}, 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_resource=compute.Instance(advanced_machine_features=compute.AdvancedMachineFeatures(enable_nested_virtualization=True)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertInstanceRequest(), + project='project_value', + zone='zone_value', + instance_resource=compute.Instance(advanced_machine_features=compute.AdvancedMachineFeatures(enable_nested_virtualization=True)), + ) + + +def test_insert_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInstanceRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_resource"] = {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'cpu_platform': 'cpu_platform_value', 'creation_timestamp': 'creation_timestamp_value', 'deletion_protection': True, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 
'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'display_device': {'enable_display': True}, 'fingerprint': 'fingerprint_value', 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'hostname': 'hostname_value', 'id': 205, 'instance_encryption_key': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_start_timestamp': 'last_start_timestamp_value', 'last_stop_timestamp': 'last_stop_timestamp_value', 'last_suspended_timestamp': 'last_suspended_timestamp_value', 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'name': 'name_value', 'network_interfaces': 
[{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'params': {'resource_manager_tags': {}}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'physical_host': 'physical_host_value'}, 'satisfies_pzs': True, 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'self_link': 'self_link_value', 'service_accounts': [{'email': 'email_value', 'scopes': 
['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'shielded_instance_integrity_policy': {'update_auto_learn_policy': True}, 'source_machine_image': 'source_machine_image_value', 'source_machine_image_encryption_key': {}, 'start_restricted': True, 'status': 'status_value', 'status_message': 'status_message_value', 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}, 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_instance_template", "source_machine_image", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceInstanceTemplate", "sourceMachineImage", )) & set(("instanceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInstanceRequest.pb(compute.InsertInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["instance_resource"] = {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'cpu_platform': 'cpu_platform_value', 'creation_timestamp': 'creation_timestamp_value', 'deletion_protection': True, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 
'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'display_device': {'enable_display': True}, 'fingerprint': 'fingerprint_value', 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'hostname': 'hostname_value', 'id': 205, 'instance_encryption_key': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_start_timestamp': 'last_start_timestamp_value', 'last_stop_timestamp': 'last_stop_timestamp_value', 'last_suspended_timestamp': 'last_suspended_timestamp_value', 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 
'min_cpu_platform_value', 'name': 'name_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'params': {'resource_manager_tags': {}}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'physical_host': 'physical_host_value'}, 'satisfies_pzs': True, 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'self_link': 
'self_link_value', 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'shielded_instance_integrity_policy': {'update_auto_learn_policy': True}, 'source_machine_image': 'source_machine_image_value', 'source_machine_image_encryption_key': {}, 'start_restricted': True, 'status': 'status_value', 'status_message': 'status_message_value', 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}, 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance_resource=compute.Instance(advanced_machine_features=compute.AdvancedMachineFeatures(enable_nested_virtualization=True)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertInstanceRequest(), + project='project_value', + zone='zone_value', + instance_resource=compute.Instance(advanced_machine_features=compute.AdvancedMachineFeatures(enable_nested_virtualization=True)), + ) + + +def test_insert_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListInstancesRequest, + dict, +]) +def test_list_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListInstancesRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListInstancesRequest.pb(compute.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceList.to_json(compute.InstanceList()) + + request = compute.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListInstancesRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListInstancesRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceList( + items=[ + compute.Instance(), + compute.Instance(), + compute.Instance(), + ], + next_page_token='abc', + ), + compute.InstanceList( + items=[], + next_page_token='def', + ), + compute.InstanceList( + items=[ + compute.Instance(), + ], + next_page_token='ghi', + ), + compute.InstanceList( + items=[ + compute.Instance(), + compute.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Instance) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListReferrersInstancesRequest, + dict, +]) +def test_list_referrers_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceListReferrers( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceListReferrers.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_referrers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReferrersPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_referrers_rest_required_fields(request_type=compute.ListReferrersInstancesRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_referrers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_referrers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body 
parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceListReferrers() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceListReferrers.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_referrers(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_referrers_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_referrers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_referrers_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_list_referrers") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_list_referrers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListReferrersInstancesRequest.pb(compute.ListReferrersInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + 
+ req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceListReferrers.to_json(compute.InstanceListReferrers()) + + request = compute.ListReferrersInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceListReferrers() + + client.list_referrers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_referrers_rest_bad_request(transport: str = 'rest', request_type=compute.ListReferrersInstancesRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_referrers(request) + + +def test_list_referrers_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceListReferrers() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceListReferrers.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_referrers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/referrers" % client.transport._host, args[1]) + + +def test_list_referrers_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_referrers( + compute.ListReferrersInstancesRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_list_referrers_rest_pager(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceListReferrers( + items=[ + compute.Reference(), + compute.Reference(), + compute.Reference(), + ], + next_page_token='abc', + ), + compute.InstanceListReferrers( + items=[], + next_page_token='def', + ), + compute.InstanceListReferrers( + items=[ + compute.Reference(), + ], + next_page_token='ghi', + ), + compute.InstanceListReferrers( + items=[ + compute.Reference(), + compute.Reference(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceListReferrers.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + pager = client.list_referrers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Reference) + for i in results) + + pages = list(client.list_referrers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveResourcePoliciesInstanceRequest, + dict, +]) +def test_remove_resource_policies_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_resource_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_resource_policies_rest_required_fields(request_type=compute.RemoveResourcePoliciesInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present 
+ + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_resource_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_resource_policies_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesRemoveResourcePoliciesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_resource_policies_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_remove_resource_policies") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_remove_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveResourcePoliciesInstanceRequest.pb(compute.RemoveResourcePoliciesInstanceRequest()) + transcode.return_value = { 
+ "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveResourcePoliciesInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_resource_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_resource_policies_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveResourcePoliciesInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies(request) + + +def test_remove_resource_policies_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_remove_resource_policies_request_resource=compute.InstancesRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_resource_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/removeResourcePolicies" % client.transport._host, args[1]) + + +def test_remove_resource_policies_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_resource_policies( + compute.RemoveResourcePoliciesInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_remove_resource_policies_request_resource=compute.InstancesRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_remove_resource_policies_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveResourcePoliciesInstanceRequest, + dict, +]) +def test_remove_resource_policies_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_resource_policies_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_resource_policies_unary_rest_required_fields(request_type=compute.RemoveResourcePoliciesInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_resource_policies_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_resource_policies_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesRemoveResourcePoliciesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_resource_policies_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) 
+ with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_remove_resource_policies") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_remove_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveResourcePoliciesInstanceRequest.pb(compute.RemoveResourcePoliciesInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveResourcePoliciesInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_resource_policies_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_resource_policies_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveResourcePoliciesInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies_unary(request) + + +def test_remove_resource_policies_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_remove_resource_policies_request_resource=compute.InstancesRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_resource_policies_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/removeResourcePolicies" % client.transport._host, args[1]) + + +def test_remove_resource_policies_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_resource_policies_unary( + compute.RemoveResourcePoliciesInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_remove_resource_policies_request_resource=compute.InstancesRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_remove_resource_policies_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResetInstanceRequest, + dict, +]) +def test_reset_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.reset(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_reset_rest_required_fields(request_type=compute.ResetInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reset._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.reset(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_reset_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.reset._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reset_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_reset") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_reset") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResetInstanceRequest.pb(compute.ResetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.reset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reset_rest_bad_request(transport: str = 'rest', request_type=compute.ResetInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reset(request) + + +def test_reset_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.reset(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/reset" % client.transport._host, args[1]) + + +def test_reset_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.reset( + compute.ResetInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_reset_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResetInstanceRequest, + dict, +]) +def test_reset_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.reset_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_reset_unary_rest_required_fields(request_type=compute.ResetInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reset._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reset._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.reset_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_reset_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.reset._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reset_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_reset") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_reset") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResetInstanceRequest.pb(compute.ResetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.reset_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reset_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ResetInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reset_unary(request) + + +def test_reset_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.reset_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/reset" % client.transport._host, args[1]) + + +def test_reset_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.reset_unary( + compute.ResetInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_reset_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResumeInstanceRequest, + dict, +]) +def test_resume_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.resume(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_resume_rest_required_fields(request_type=compute.ResumeInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + 
# verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resume._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resume(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resume_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resume._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_resume") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_resume") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResumeInstanceRequest.pb(compute.ResumeInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResumeInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resume(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_rest_bad_request(transport: str = 'rest', request_type=compute.ResumeInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume(request) + + +def test_resume_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/resume" % client.transport._host, args[1]) + + +def test_resume_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resume( + compute.ResumeInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_resume_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResumeInstanceRequest, + dict, +]) +def test_resume_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.resume_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_resume_unary_rest_required_fields(request_type=compute.ResumeInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resume._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resume_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resume_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resume._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_resume") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_resume") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResumeInstanceRequest.pb(compute.ResumeInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResumeInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resume_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ResumeInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume_unary(request) + + +def test_resume_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resume_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/resume" % client.transport._host, args[1]) + + +def test_resume_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resume_unary( + compute.ResumeInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_resume_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SendDiagnosticInterruptInstanceRequest, + dict, +]) +def test_send_diagnostic_interrupt_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SendDiagnosticInterruptInstanceResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SendDiagnosticInterruptInstanceResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.send_diagnostic_interrupt(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SendDiagnosticInterruptInstanceResponse) + + +def test_send_diagnostic_interrupt_rest_required_fields(request_type=compute.SendDiagnosticInterruptInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).send_diagnostic_interrupt._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).send_diagnostic_interrupt._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SendDiagnosticInterruptInstanceResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SendDiagnosticInterruptInstanceResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.send_diagnostic_interrupt(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_send_diagnostic_interrupt_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.send_diagnostic_interrupt._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_send_diagnostic_interrupt_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_send_diagnostic_interrupt") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_send_diagnostic_interrupt") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SendDiagnosticInterruptInstanceRequest.pb(compute.SendDiagnosticInterruptInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SendDiagnosticInterruptInstanceResponse.to_json(compute.SendDiagnosticInterruptInstanceResponse()) + + request = compute.SendDiagnosticInterruptInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SendDiagnosticInterruptInstanceResponse() + + client.send_diagnostic_interrupt(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_send_diagnostic_interrupt_rest_bad_request(transport: str = 'rest', request_type=compute.SendDiagnosticInterruptInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.send_diagnostic_interrupt(request) + + +def test_send_diagnostic_interrupt_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SendDiagnosticInterruptInstanceResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SendDiagnosticInterruptInstanceResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.send_diagnostic_interrupt(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/sendDiagnosticInterrupt" % client.transport._host, args[1]) + + +def test_send_diagnostic_interrupt_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.send_diagnostic_interrupt( + compute.SendDiagnosticInterruptInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_send_diagnostic_interrupt_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetDeletionProtectionInstanceRequest, + dict, +]) +def test_set_deletion_protection_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_deletion_protection(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_deletion_protection_rest_required_fields(request_type=compute.SetDeletionProtectionInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_deletion_protection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_deletion_protection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("deletion_protection", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_deletion_protection(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_deletion_protection_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_deletion_protection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("deletionProtection", "requestId", )) & set(("project", "resource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_deletion_protection_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_deletion_protection") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_deletion_protection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetDeletionProtectionInstanceRequest.pb(compute.SetDeletionProtectionInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetDeletionProtectionInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_deletion_protection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_deletion_protection_rest_bad_request(transport: str = 'rest', request_type=compute.SetDeletionProtectionInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_deletion_protection(request) + + +def test_set_deletion_protection_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_deletion_protection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection" % client.transport._host, args[1]) + + +def test_set_deletion_protection_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_deletion_protection( + compute.SetDeletionProtectionInstanceRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + ) + + +def test_set_deletion_protection_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetDeletionProtectionInstanceRequest, + dict, +]) +def test_set_deletion_protection_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_deletion_protection_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_deletion_protection_unary_rest_required_fields(request_type=compute.SetDeletionProtectionInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_deletion_protection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_deletion_protection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("deletion_protection", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_deletion_protection_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_deletion_protection_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_deletion_protection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("deletionProtection", "requestId", )) & set(("project", "resource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_deletion_protection_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as 
req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_deletion_protection") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_deletion_protection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetDeletionProtectionInstanceRequest.pb(compute.SetDeletionProtectionInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetDeletionProtectionInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_deletion_protection_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_deletion_protection_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetDeletionProtectionInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_deletion_protection_unary(request) + + +def test_set_deletion_protection_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_deletion_protection_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection" % client.transport._host, args[1]) + + +def test_set_deletion_protection_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_deletion_protection_unary( + compute.SetDeletionProtectionInstanceRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + ) + + +def test_set_deletion_protection_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetDiskAutoDeleteInstanceRequest, + dict, +]) +def test_set_disk_auto_delete_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_disk_auto_delete(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, extended_operation.ExtendedOperation)
+    assert response.client_operation_id == 'client_operation_id_value'
+    assert response.creation_timestamp == 'creation_timestamp_value'
+    assert response.description == 'description_value'
+    assert response.end_time == 'end_time_value'
+    assert response.http_error_message == 'http_error_message_value'
+    assert response.http_error_status_code == 2374
+    assert response.id == 205
+    assert response.insert_time == 'insert_time_value'
+    assert response.kind == 'kind_value'
+    assert response.name == 'name_value'
+    assert response.operation_group_id == 'operation_group_id_value'
+    assert response.operation_type == 'operation_type_value'
+    assert response.progress == 885
+    assert response.region == 'region_value'
+    assert response.self_link == 'self_link_value'
+    assert response.start_time == 'start_time_value'
+    assert response.status == compute.Operation.Status.DONE
+    assert response.status_message == 'status_message_value'
+    assert response.target_id == 947
+    assert response.target_link == 'target_link_value'
+    assert response.user == 'user_value'
+    assert response.zone == 'zone_value'
+
+
+def test_set_disk_auto_delete_rest_required_fields(request_type=compute.SetDiskAutoDeleteInstanceRequest):
+    """Verify required-field handling for set_disk_auto_delete: fields with
+    default values are dropped from the JSON request, re-added by
+    _get_unset_required_fields, and surfaced as query params on the wire."""
+    transport_class = transports.InstancesRestTransport
+
+    request_init = {}
+    request_init["auto_delete"] = False
+    request_init["device_name"] = ""
+    request_init["instance"] = ""
+    request_init["project"] = ""
+    request_init["zone"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+    assert "autoDelete" not in jsonified_request
+    assert "deviceName" not in jsonified_request
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_disk_auto_delete._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+    assert "autoDelete" in jsonified_request
+    assert jsonified_request["autoDelete"] == request_init["auto_delete"]
+    assert "deviceName" in jsonified_request
+    assert jsonified_request["deviceName"] == request_init["device_name"]
+
+    jsonified_request["autoDelete"] = True
+    jsonified_request["deviceName"] = 'device_name_value'
+    jsonified_request["instance"] = 'instance_value'
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["zone"] = 'zone_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_disk_auto_delete._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("auto_delete", "device_name", "request_id", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "autoDelete" in jsonified_request
+    assert jsonified_request["autoDelete"] == True
+    assert "deviceName" in jsonified_request
+    assert jsonified_request["deviceName"] == 'device_name_value'
+    assert "instance" in jsonified_request
+    assert jsonified_request["instance"] == 'instance_value'
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "zone" in jsonified_request
+    assert jsonified_request["zone"] == 'zone_value'
+
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_disk_auto_delete(request)
+
+            # Defaulted required fields must still be sent as query params.
+            expected_params = [
+                (
+                    "autoDelete",
+                    str(False).lower(),
+                ),
+                (
+                    "deviceName",
+                    "",
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_disk_auto_delete_rest_unset_required_fields():
+    """Check which required fields _get_unset_required_fields reports for an
+    empty request (intersection of query params and all required fields)."""
+    # NOTE(review): AnonymousCredentials is passed as the class here, not an
+    # instance as in the other tests — TODO confirm this is intentional.
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.set_disk_auto_delete._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("autoDelete", "deviceName", "requestId", )) & set(("autoDelete", "deviceName", "instance", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_disk_auto_delete_rest_interceptors(null_interceptor):
+    """Verify pre/post interceptor hooks fire exactly once around the call,
+    both with and without an interceptor installed on the transport."""
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstancesRestInterceptor, "post_set_disk_auto_delete") as post, \
+         mock.patch.object(transports.InstancesRestInterceptor, "pre_set_disk_auto_delete") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetDiskAutoDeleteInstanceRequest.pb(compute.SetDiskAutoDeleteInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetDiskAutoDeleteInstanceRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_disk_auto_delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_disk_auto_delete_rest_bad_request(transport: str = 'rest', request_type=compute.SetDiskAutoDeleteInstanceRequest):
+    """An HTTP 400 from the session must surface as core_exceptions.BadRequest."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_disk_auto_delete(request)
+
+
+def test_set_disk_auto_delete_rest_flattened():
+    """Calling with flattened keyword arguments must produce a request whose
+    URL matches the setDiskAutoDelete path template."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            zone='zone_value',
+            instance='instance_value',
+            auto_delete=True,
+            device_name='device_name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.set_disk_auto_delete(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete" % client.transport._host, args[1])
+
+
+def test_set_disk_auto_delete_rest_flattened_error(transport: str = 'rest'):
+    """Passing both a request object and flattened fields is a ValueError."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_disk_auto_delete(
+            compute.SetDiskAutoDeleteInstanceRequest(),
+            project='project_value',
+            zone='zone_value',
+            instance='instance_value',
+            auto_delete=True,
+            device_name='device_name_value',
+        )
+
+
+def test_set_disk_auto_delete_rest_error():
+    """Smoke test: client construction with the rest transport succeeds."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.SetDiskAutoDeleteInstanceRequest,
+    dict,
+])
+def test_set_disk_auto_delete_unary_rest(request_type):
+    """Unary variant: the call returns a plain compute.Operation (asserted in
+    the continuation of this test) instead of an ExtendedOperation."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation(
+            client_operation_id='client_operation_id_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            end_time='end_time_value',
+            http_error_message='http_error_message_value',
+            http_error_status_code=2374,
+            id=205,
+            insert_time='insert_time_value',
+            kind='kind_value',
+            name='name_value',
+            operation_group_id='operation_group_id_value',
+            operation_type='operation_type_value',
+            progress=885,
+            region='region_value',
+            self_link='self_link_value',
+            start_time='start_time_value',
+            status=compute.Operation.Status.DONE,
+            status_message='status_message_value',
+            target_id=947,
+            target_link='target_link_value',
+            user='user_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.set_disk_auto_delete_unary(request)
+
+        # Establish that the response is the type that we expect.
+    assert isinstance(response, compute.Operation)
+
+
+def test_set_disk_auto_delete_unary_rest_required_fields(request_type=compute.SetDiskAutoDeleteInstanceRequest):
+    """Unary variant of the required-field test: defaults dropped, re-added
+    via _get_unset_required_fields, and sent as query params."""
+    transport_class = transports.InstancesRestTransport
+
+    request_init = {}
+    request_init["auto_delete"] = False
+    request_init["device_name"] = ""
+    request_init["instance"] = ""
+    request_init["project"] = ""
+    request_init["zone"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+    assert "autoDelete" not in jsonified_request
+    assert "deviceName" not in jsonified_request
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_disk_auto_delete._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+    assert "autoDelete" in jsonified_request
+    assert jsonified_request["autoDelete"] == request_init["auto_delete"]
+    assert "deviceName" in jsonified_request
+    assert jsonified_request["deviceName"] == request_init["device_name"]
+
+    jsonified_request["autoDelete"] = True
+    jsonified_request["deviceName"] = 'device_name_value'
+    jsonified_request["instance"] = 'instance_value'
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["zone"] = 'zone_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_disk_auto_delete._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("auto_delete", "device_name", "request_id", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "autoDelete" in jsonified_request
+    assert jsonified_request["autoDelete"] == True
+    assert "deviceName" in jsonified_request
+    assert jsonified_request["deviceName"] == 'device_name_value'
+    assert "instance" in jsonified_request
+    assert jsonified_request["instance"] == 'instance_value'
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "zone" in jsonified_request
+    assert jsonified_request["zone"] == 'zone_value'
+
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_disk_auto_delete_unary(request)
+
+            # Defaulted required fields must still be sent as query params.
+            expected_params = [
+                (
+                    "autoDelete",
+                    str(False).lower(),
+                ),
+                (
+                    "deviceName",
+                    "",
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_disk_auto_delete_unary_rest_unset_required_fields():
+    """Check _get_unset_required_fields output for an empty request."""
+    # NOTE(review): AnonymousCredentials is passed as the class here, not an
+    # instance as in the other tests — TODO confirm this is intentional.
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.set_disk_auto_delete._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("autoDelete", "deviceName", "requestId", )) & set(("autoDelete", "deviceName", "instance", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_disk_auto_delete_unary_rest_interceptors(null_interceptor):
+    """Unary variant: pre/post interceptor hooks fire exactly once."""
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstancesRestInterceptor, "post_set_disk_auto_delete") as post, \
+         mock.patch.object(transports.InstancesRestInterceptor, "pre_set_disk_auto_delete") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetDiskAutoDeleteInstanceRequest.pb(compute.SetDiskAutoDeleteInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetDiskAutoDeleteInstanceRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_disk_auto_delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_disk_auto_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetDiskAutoDeleteInstanceRequest):
+    """Unary variant: HTTP 400 surfaces as core_exceptions.BadRequest."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_disk_auto_delete_unary(request)
+
+
+def test_set_disk_auto_delete_unary_rest_flattened():
+    """Unary variant: flattened kwargs must hit the setDiskAutoDelete URL."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            zone='zone_value',
+            instance='instance_value',
+            auto_delete=True,
+            device_name='device_name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.set_disk_auto_delete_unary(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete" % client.transport._host, args[1])
+
+
+def test_set_disk_auto_delete_unary_rest_flattened_error(transport: str = 'rest'):
+    """Passing both a request object and flattened fields is a ValueError."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_disk_auto_delete_unary(
+            compute.SetDiskAutoDeleteInstanceRequest(),
+            project='project_value',
+            zone='zone_value',
+            instance='instance_value',
+            auto_delete=True,
+            device_name='device_name_value',
+        )
+
+
+def test_set_disk_auto_delete_unary_rest_error():
+    """Smoke test: client construction with the rest transport succeeds."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.SetIamPolicyInstanceRequest,
+    dict,
+])
+def test_set_iam_policy_rest(request_type):
+    """set_iam_policy round trip: a fully-populated body request returns a
+    compute.Policy whose scalar fields match the mocked response."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'}
+    request_init["zone_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Policy(
+            etag='etag_value',
+            iam_owned=True,
+            version=774,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Policy.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.set_iam_policy(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, compute.Policy)
+    assert response.etag == 'etag_value'
+    assert response.iam_owned is True
+    assert response.version == 774
+
+
+def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyInstanceRequest):
+    """Required-field handling for set_iam_policy; this method has a request
+    body, so no defaulted query params are expected on the wire."""
+    transport_class = transports.InstancesRestTransport
+
+    request_init = {}
+    request_init["project"] = ""
+    request_init["resource"] = ""
+    request_init["zone"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["resource"] = 'resource_value'
+    jsonified_request["zone"] = 'zone_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "resource" in jsonified_request
+    assert jsonified_request["resource"] == 'resource_value'
+    assert "zone" in jsonified_request
+    assert jsonified_request["zone"] == 'zone_value'
+
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Policy()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Policy.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_iam_policy(request)
+
+            # All required fields travel in the path or body, so no query
+            # params are expected.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_iam_policy_rest_unset_required_fields():
+    """Check _get_unset_required_fields output for an empty request."""
+    # NOTE(review): AnonymousCredentials is passed as the class here, not an
+    # instance as in the other tests — TODO confirm this is intentional.
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.set_iam_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("project", "resource", "zone", "zoneSetPolicyRequestResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_iam_policy_rest_interceptors(null_interceptor):
+    """Verify pre/post interceptor hooks fire exactly once around set_iam_policy."""
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstancesRestInterceptor, "post_set_iam_policy") as post, \
+         mock.patch.object(transports.InstancesRestInterceptor, "pre_set_iam_policy") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetIamPolicyInstanceRequest.pb(compute.SetIamPolicyInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Policy.to_json(compute.Policy())
+
+        request = compute.SetIamPolicyInstanceRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Policy()
+
+        client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyInstanceRequest):
+    """An HTTP 400 from the session must surface as core_exceptions.BadRequest."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'}
+    request_init["zone_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_iam_policy(request)
+
+
+def test_set_iam_policy_rest_flattened():
+    """Flattened kwargs must hit the setIamPolicy URL."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Policy()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            zone='zone_value',
+            resource='resource_value',
+            zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Policy.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.set_iam_policy(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setIamPolicy" % client.transport._host, args[1])
+
+
+def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'):
+    """Passing both a request object and flattened fields is a ValueError."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_iam_policy(
+            compute.SetIamPolicyInstanceRequest(),
+            project='project_value',
+            zone='zone_value',
+            resource='resource_value',
+            zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]),
+        )
+
+
+def test_set_iam_policy_rest_error():
+    """Smoke test: client construction with the rest transport succeeds."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.SetLabelsInstanceRequest,
+    dict,
+])
+def test_set_labels_rest(request_type):
+    """set_labels round trip: the call returns an ExtendedOperation whose
+    fields match the mocked compute.Operation response."""
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
+    request_init["instances_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation(
+            client_operation_id='client_operation_id_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            end_time='end_time_value',
+            http_error_message='http_error_message_value',
+            http_error_status_code=2374,
+            id=205,
+            insert_time='insert_time_value',
+            kind='kind_value',
+            name='name_value',
+            operation_group_id='operation_group_id_value',
+            operation_type='operation_type_value',
+            progress=885,
+            region='region_value',
+            self_link='self_link_value',
+            start_time='start_time_value',
+            status=compute.Operation.Status.DONE,
+            status_message='status_message_value',
+            target_id=947,
+            target_link='target_link_value',
+            user='user_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.set_labels(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 
'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetLabelsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsInstanceRequest.pb(compute.SetLabelsInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_labels_request_resource=compute.InstancesSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_labels_request_resource=compute.InstancesSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsInstanceRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetLabelsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsInstanceRequest.pb(compute.SetLabelsInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_labels_request_resource=compute.InstancesSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_labels_request_resource=compute.InstancesSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetMachineResourcesInstanceRequest, + dict, +]) +def test_set_machine_resources_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_machine_resources_request_resource"] = {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_machine_resources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_machine_resources_rest_required_fields(request_type=compute.SetMachineResourcesInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_machine_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_machine_resources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_machine_resources(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_machine_resources_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_machine_resources._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetMachineResourcesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_machine_resources_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_machine_resources") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_machine_resources") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetMachineResourcesInstanceRequest.pb(compute.SetMachineResourcesInstanceRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetMachineResourcesInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_machine_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_machine_resources_rest_bad_request(transport: str = 'rest', request_type=compute.SetMachineResourcesInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_machine_resources_request_resource"] = {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_machine_resources(request) + + +def test_set_machine_resources_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_machine_resources_request_resource=compute.InstancesSetMachineResourcesRequest(guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_machine_resources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineResources" % client.transport._host, args[1]) + + +def test_set_machine_resources_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_machine_resources( + compute.SetMachineResourcesInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_machine_resources_request_resource=compute.InstancesSetMachineResourcesRequest(guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]), + ) + + +def test_set_machine_resources_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetMachineResourcesInstanceRequest, + dict, +]) +def test_set_machine_resources_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_machine_resources_request_resource"] = {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_machine_resources_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_machine_resources_unary_rest_required_fields(request_type=compute.SetMachineResourcesInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_machine_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_machine_resources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_machine_resources_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_machine_resources_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_machine_resources._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetMachineResourcesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_machine_resources_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_machine_resources") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_machine_resources") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetMachineResourcesInstanceRequest.pb(compute.SetMachineResourcesInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetMachineResourcesInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_machine_resources_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_machine_resources_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetMachineResourcesInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_machine_resources_request_resource"] = {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_machine_resources_unary(request) + + +def test_set_machine_resources_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_machine_resources_request_resource=compute.InstancesSetMachineResourcesRequest(guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_machine_resources_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
    # Exactly one HTTP call was made, and it hit the setMachineResources URI
    # with the flattened path params substituted in.
    assert len(req.mock_calls) == 1
    _, args, _ = req.mock_calls[0]
    assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineResources" % client.transport._host, args[1])


def test_set_machine_resources_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.set_machine_resources_unary(
            compute.SetMachineResourcesInstanceRequest(),
            project='project_value',
            zone='zone_value',
            instance='instance_value',
            instances_set_machine_resources_request_resource=compute.InstancesSetMachineResourcesRequest(guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]),
        )


def test_set_machine_resources_unary_rest_error():
    """Smoke test: constructing a REST client must not raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.SetMachineTypeInstanceRequest,
    dict,
])
def test_set_machine_type_rest(request_type):
    """Invoke set_machine_type over REST against a mocked HTTP session."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request_init["instances_set_machine_type_request_resource"] = {'machine_type': 'machine_type_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_machine_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_machine_type_rest_required_fields(request_type=compute.SetMachineTypeInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_machine_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_machine_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_machine_type(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_machine_type_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_machine_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetMachineTypeRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_machine_type_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_machine_type") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_machine_type") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetMachineTypeInstanceRequest.pb(compute.SetMachineTypeInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetMachineTypeInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_machine_type(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_machine_type_rest_bad_request(transport: str = 'rest', request_type=compute.SetMachineTypeInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_machine_type_request_resource"] = {'machine_type': 'machine_type_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_machine_type(request) + + +def test_set_machine_type_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_machine_type_request_resource=compute.InstancesSetMachineTypeRequest(machine_type='machine_type_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_machine_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineType" % client.transport._host, args[1]) + + +def test_set_machine_type_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_machine_type( + compute.SetMachineTypeInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_machine_type_request_resource=compute.InstancesSetMachineTypeRequest(machine_type='machine_type_value'), + ) + + +def test_set_machine_type_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetMachineTypeInstanceRequest, + dict, +]) +def test_set_machine_type_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_machine_type_request_resource"] = {'machine_type': 'machine_type_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_machine_type_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_machine_type_unary_rest_required_fields(request_type=compute.SetMachineTypeInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_machine_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_machine_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_machine_type_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_machine_type_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_machine_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetMachineTypeRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_machine_type_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_set_machine_type") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_machine_type") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetMachineTypeInstanceRequest.pb(compute.SetMachineTypeInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetMachineTypeInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_machine_type_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_machine_type_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetMachineTypeInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_machine_type_request_resource"] = {'machine_type': 'machine_type_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_machine_type_unary(request) + + +def test_set_machine_type_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_machine_type_request_resource=compute.InstancesSetMachineTypeRequest(machine_type='machine_type_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_machine_type_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineType" % client.transport._host, args[1]) + + +def test_set_machine_type_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_machine_type_unary( + compute.SetMachineTypeInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_machine_type_request_resource=compute.InstancesSetMachineTypeRequest(machine_type='machine_type_value'), + ) + + +def test_set_machine_type_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetMetadataInstanceRequest, + dict, +]) +def test_set_metadata_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["metadata_resource"] = {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_metadata(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_metadata_rest_required_fields(request_type=compute.SetMetadataInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 
'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_metadata(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_metadata_rest_unset_required_fields():
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_metadata._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instance", "metadataResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_metadata_rest_interceptors(null_interceptor):
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstancesRestInterceptor, "post_set_metadata") as post, \
+         mock.patch.object(transports.InstancesRestInterceptor, "pre_set_metadata") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetMetadataInstanceRequest.pb(compute.SetMetadataInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value =
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetMetadataInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_metadata(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_metadata_rest_bad_request(transport: str = 'rest', request_type=compute.SetMetadataInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["metadata_resource"] = {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_metadata(request) + + +def test_set_metadata_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + metadata_resource=compute.Metadata(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMetadata" % client.transport._host, args[1]) + + +def test_set_metadata_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_metadata( + compute.SetMetadataInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + metadata_resource=compute.Metadata(fingerprint='fingerprint_value'), + ) + + +def test_set_metadata_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetMetadataInstanceRequest, + dict, +]) +def test_set_metadata_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["metadata_resource"] = {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_metadata_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_metadata_unary_rest_required_fields(request_type=compute.SetMetadataInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_metadata_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_metadata_unary_rest_unset_required_fields():
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_metadata._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instance", "metadataResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_metadata_unary_rest_interceptors(null_interceptor):
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+
mock.patch.object(transports.InstancesRestInterceptor, "post_set_metadata") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_metadata") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetMetadataInstanceRequest.pb(compute.SetMetadataInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetMetadataInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_metadata_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_metadata_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetMetadataInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["metadata_resource"] = {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_metadata_unary(request) + + +def test_set_metadata_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + metadata_resource=compute.Metadata(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_metadata_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMetadata" % client.transport._host, args[1]) + + +def test_set_metadata_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_metadata_unary( + compute.SetMetadataInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + metadata_resource=compute.Metadata(fingerprint='fingerprint_value'), + ) + + +def test_set_metadata_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetMinCpuPlatformInstanceRequest, + dict, +]) +def test_set_min_cpu_platform_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_min_cpu_platform_request_resource"] = {'min_cpu_platform': 'min_cpu_platform_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_min_cpu_platform(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_min_cpu_platform_rest_required_fields(request_type=compute.SetMinCpuPlatformInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_min_cpu_platform._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_min_cpu_platform._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_min_cpu_platform(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_min_cpu_platform_rest_unset_required_fields():
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_min_cpu_platform._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetMinCpuPlatformRequestResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_min_cpu_platform_rest_interceptors(null_interceptor):
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+        )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstancesRestInterceptor, "post_set_min_cpu_platform") as post, \
+         mock.patch.object(transports.InstancesRestInterceptor, "pre_set_min_cpu_platform") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetMinCpuPlatformInstanceRequest.pb(compute.SetMinCpuPlatformInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+ "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetMinCpuPlatformInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_min_cpu_platform(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_min_cpu_platform_rest_bad_request(transport: str = 'rest', request_type=compute.SetMinCpuPlatformInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_min_cpu_platform_request_resource"] = {'min_cpu_platform': 'min_cpu_platform_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_min_cpu_platform(request) + + +def test_set_min_cpu_platform_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_min_cpu_platform_request_resource=compute.InstancesSetMinCpuPlatformRequest(min_cpu_platform='min_cpu_platform_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_min_cpu_platform(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform" % client.transport._host, args[1]) + + +def test_set_min_cpu_platform_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_min_cpu_platform( + compute.SetMinCpuPlatformInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_min_cpu_platform_request_resource=compute.InstancesSetMinCpuPlatformRequest(min_cpu_platform='min_cpu_platform_value'), + ) + + +def test_set_min_cpu_platform_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetMinCpuPlatformInstanceRequest, + dict, +]) +def test_set_min_cpu_platform_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_min_cpu_platform_request_resource"] = {'min_cpu_platform': 'min_cpu_platform_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_min_cpu_platform_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_min_cpu_platform_unary_rest_required_fields(request_type=compute.SetMinCpuPlatformInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_min_cpu_platform._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_min_cpu_platform._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_min_cpu_platform_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_min_cpu_platform_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_min_cpu_platform._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetMinCpuPlatformRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_min_cpu_platform_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_min_cpu_platform") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_min_cpu_platform") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetMinCpuPlatformInstanceRequest.pb(compute.SetMinCpuPlatformInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetMinCpuPlatformInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_min_cpu_platform_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_min_cpu_platform_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetMinCpuPlatformInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_min_cpu_platform_request_resource"] = {'min_cpu_platform': 'min_cpu_platform_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_min_cpu_platform_unary(request) + + +def test_set_min_cpu_platform_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_min_cpu_platform_request_resource=compute.InstancesSetMinCpuPlatformRequest(min_cpu_platform='min_cpu_platform_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_min_cpu_platform_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform" % client.transport._host, args[1]) + + +def test_set_min_cpu_platform_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_min_cpu_platform_unary( + compute.SetMinCpuPlatformInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_min_cpu_platform_request_resource=compute.InstancesSetMinCpuPlatformRequest(min_cpu_platform='min_cpu_platform_value'), + ) + + +def test_set_min_cpu_platform_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetNameInstanceRequest, + dict, +]) +def test_set_name_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_name_request_resource"] = {'current_name': 'current_name_value', 'name': 'name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_name(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_name_rest_required_fields(request_type=compute.SetNameInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_name._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 
'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_name._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_name(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_name_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_name._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetNameRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_name_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_name") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_name") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetNameInstanceRequest.pb(compute.SetNameInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetNameInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_name(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_name_rest_bad_request(transport: str = 'rest', request_type=compute.SetNameInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_name_request_resource"] = {'current_name': 'current_name_value', 'name': 'name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_name(request) + + +def test_set_name_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_name_request_resource=compute.InstancesSetNameRequest(current_name='current_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_name(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setName" % client.transport._host, args[1]) + + +def test_set_name_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_name( + compute.SetNameInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_name_request_resource=compute.InstancesSetNameRequest(current_name='current_name_value'), + ) + + +def test_set_name_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetNameInstanceRequest, + dict, +]) +def test_set_name_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_name_request_resource"] = {'current_name': 'current_name_value', 'name': 'name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_name_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_name_unary_rest_required_fields(request_type=compute.SetNameInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_name._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_name._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_name_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_name_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_name._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetNameRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_name_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_set_name") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_name") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetNameInstanceRequest.pb(compute.SetNameInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetNameInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_name_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_name_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetNameInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_name_request_resource"] = {'current_name': 'current_name_value', 'name': 'name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_name_unary(request) + + +def test_set_name_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_name_request_resource=compute.InstancesSetNameRequest(current_name='current_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_name_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setName" % client.transport._host, args[1]) + + +def test_set_name_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_name_unary( + compute.SetNameInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_name_request_resource=compute.InstancesSetNameRequest(current_name='current_name_value'), + ) + + +def test_set_name_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSchedulingInstanceRequest, + dict, +]) +def test_set_scheduling_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["scheduling_resource"] = {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_scheduling(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_scheduling_rest_required_fields(request_type=compute.SetSchedulingInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_scheduling._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_scheduling._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_scheduling(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_scheduling_rest_unset_required_fields():
+    # NOTE(review): the generator emitted the AnonymousCredentials *class*
+    # here instead of an instance; instantiate it so the transport receives
+    # a real credentials object (behavior-compatible — these tests never
+    # exercise the credentials).
+    transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_scheduling._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "schedulingResource", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_scheduling_rest_interceptors(null_interceptor):
+    transport = transports.InstancesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstancesRestInterceptor(),
+    )
+    client = InstancesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstancesRestInterceptor, "post_set_scheduling") as post, \
+        mock.patch.object(transports.InstancesRestInterceptor, "pre_set_scheduling") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetSchedulingInstanceRequest.pb(compute.SetSchedulingInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+
req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetSchedulingInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_scheduling(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_scheduling_rest_bad_request(transport: str = 'rest', request_type=compute.SetSchedulingInstanceRequest):
+    client = InstancesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
+    request_init["scheduling_resource"] = {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_scheduling(request) + + +def test_set_scheduling_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + scheduling_resource=compute.Scheduling(automatic_restart=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_scheduling(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setScheduling" % client.transport._host, args[1]) + + +def test_set_scheduling_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_scheduling( + compute.SetSchedulingInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + scheduling_resource=compute.Scheduling(automatic_restart=True), + ) + + +def test_set_scheduling_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSchedulingInstanceRequest, + dict, +]) +def test_set_scheduling_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["scheduling_resource"] = {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_scheduling_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_scheduling_unary_rest_required_fields(request_type=compute.SetSchedulingInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_scheduling._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_scheduling._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_scheduling_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_scheduling_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_scheduling._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "schedulingResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_scheduling_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_set_scheduling") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_scheduling") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSchedulingInstanceRequest.pb(compute.SetSchedulingInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSchedulingInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_scheduling_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_scheduling_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetSchedulingInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["scheduling_resource"] = {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_scheduling_unary(request) + + +def test_set_scheduling_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + scheduling_resource=compute.Scheduling(automatic_restart=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_scheduling_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setScheduling" % client.transport._host, args[1]) + + +def test_set_scheduling_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_scheduling_unary( + compute.SetSchedulingInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + scheduling_resource=compute.Scheduling(automatic_restart=True), + ) + + +def test_set_scheduling_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetServiceAccountInstanceRequest, + dict, +]) +def test_set_service_account_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_service_account_request_resource"] = {'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_service_account(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_service_account_rest_required_fields(request_type=compute.SetServiceAccountInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_service_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_service_account._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_service_account(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_service_account_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_service_account._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetServiceAccountRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_service_account_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_service_account") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_service_account") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetServiceAccountInstanceRequest.pb(compute.SetServiceAccountInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetServiceAccountInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_service_account(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_service_account_rest_bad_request(transport: str = 'rest', request_type=compute.SetServiceAccountInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_service_account_request_resource"] = {'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_service_account(request) + + +def test_set_service_account_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest(email='email_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_service_account(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount" % client.transport._host, args[1]) + + +def test_set_service_account_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_service_account( + compute.SetServiceAccountInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest(email='email_value'), + ) + + +def test_set_service_account_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetServiceAccountInstanceRequest, + dict, +]) +def test_set_service_account_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_service_account_request_resource"] = {'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_service_account_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_service_account_unary_rest_required_fields(request_type=compute.SetServiceAccountInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_service_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_service_account._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_service_account_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_service_account_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_service_account._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesSetServiceAccountRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_service_account_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_service_account") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_service_account") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetServiceAccountInstanceRequest.pb(compute.SetServiceAccountInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetServiceAccountInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_service_account_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_service_account_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetServiceAccountInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_set_service_account_request_resource"] = {'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_service_account_unary(request) + + +def test_set_service_account_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest(email='email_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_service_account_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount" % client.transport._host, args[1]) + + +def test_set_service_account_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_service_account_unary( + compute.SetServiceAccountInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest(email='email_value'), + ) + + +def test_set_service_account_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, + dict, +]) +def test_set_shielded_instance_integrity_policy_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["shielded_instance_integrity_policy_resource"] = {'update_auto_learn_policy': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_shielded_instance_integrity_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_shielded_instance_integrity_policy_rest_required_fields(request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_shielded_instance_integrity_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_shielded_instance_integrity_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_shielded_instance_integrity_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_shielded_instance_integrity_policy_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_shielded_instance_integrity_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "shieldedInstanceIntegrityPolicyResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_shielded_instance_integrity_policy_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_shielded_instance_integrity_policy") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_shielded_instance_integrity_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.pb(compute.SetShieldedInstanceIntegrityPolicyInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_shielded_instance_integrity_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_shielded_instance_integrity_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["shielded_instance_integrity_policy_resource"] = {'update_auto_learn_policy': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_shielded_instance_integrity_policy(request) + + +def test_set_shielded_instance_integrity_policy_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy(update_auto_learn_policy=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_shielded_instance_integrity_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy" % client.transport._host, args[1]) + + +def test_set_shielded_instance_integrity_policy_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_shielded_instance_integrity_policy( + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy(update_auto_learn_policy=True), + ) + + +def test_set_shielded_instance_integrity_policy_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, + dict, +]) +def test_set_shielded_instance_integrity_policy_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["shielded_instance_integrity_policy_resource"] = {'update_auto_learn_policy': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_shielded_instance_integrity_policy_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_shielded_instance_integrity_policy_unary_rest_required_fields(request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_shielded_instance_integrity_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_shielded_instance_integrity_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_shielded_instance_integrity_policy_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_shielded_instance_integrity_policy_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_shielded_instance_integrity_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "shieldedInstanceIntegrityPolicyResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_shielded_instance_integrity_policy_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + 
) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_shielded_instance_integrity_policy") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_shielded_instance_integrity_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.pb(compute.SetShieldedInstanceIntegrityPolicyInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_shielded_instance_integrity_policy_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_shielded_instance_integrity_policy_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["shielded_instance_integrity_policy_resource"] = {'update_auto_learn_policy': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_shielded_instance_integrity_policy_unary(request) + + +def test_set_shielded_instance_integrity_policy_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy(update_auto_learn_policy=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_shielded_instance_integrity_policy_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy" % client.transport._host, args[1]) + + +def test_set_shielded_instance_integrity_policy_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_shielded_instance_integrity_policy_unary( + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy(update_auto_learn_policy=True), + ) + + +def test_set_shielded_instance_integrity_policy_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTagsInstanceRequest, + dict, +]) +def test_set_tags_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["tags_resource"] = {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_tags(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_tags_rest_required_fields(request_type=compute.SetTagsInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_tags._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 
'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_tags._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_tags(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_tags_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_tags._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "tagsResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_tags_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_set_tags") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_tags") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetTagsInstanceRequest.pb(compute.SetTagsInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code 
= 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTagsInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_tags(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_tags_rest_bad_request(transport: str = 'rest', request_type=compute.SetTagsInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["tags_resource"] = {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_tags(request) + + +def test_set_tags_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + tags_resource=compute.Tags(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_tags(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setTags" % client.transport._host, args[1]) + + +def test_set_tags_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    # Passing both a request object and flattened fields must raise.
    with pytest.raises(ValueError):
        client.set_tags(
            compute.SetTagsInstanceRequest(),
            project='project_value',
            zone='zone_value',
            instance='instance_value',
            tags_resource=compute.Tags(fingerprint='fingerprint_value'),
        )


def test_set_tags_rest_error():
    # Smoke test: a REST-transport client can be constructed for this
    # method family without raising.  No request is issued.
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.SetTagsInstanceRequest,
    dict,
])
def test_set_tags_unary_rest(request_type):
    # Exercise the unary (non-polling) set_tags call path against a mocked
    # HTTP session; accepts both a proto request and a plain dict.
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request_init["tags_resource"] = {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_tags_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_tags_unary_rest_required_fields(request_type=compute.SetTagsInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_tags._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_tags._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_tags_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_tags_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_tags._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "tagsResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_tags_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, 
"post_set_tags") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_set_tags") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetTagsInstanceRequest.pb(compute.SetTagsInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTagsInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_tags_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_tags_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetTagsInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["tags_resource"] = {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_tags_unary(request) + + +def test_set_tags_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + tags_resource=compute.Tags(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_tags_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setTags" % client.transport._host, args[1]) + + +def test_set_tags_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_tags_unary( + compute.SetTagsInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + tags_resource=compute.Tags(fingerprint='fingerprint_value'), + ) + + +def test_set_tags_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SimulateMaintenanceEventInstanceRequest, + dict, +]) +def test_simulate_maintenance_event_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.simulate_maintenance_event(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_simulate_maintenance_event_rest_required_fields(request_type=compute.SimulateMaintenanceEventInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).simulate_maintenance_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now 
present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).simulate_maintenance_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.simulate_maintenance_event(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_simulate_maintenance_event_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.simulate_maintenance_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_simulate_maintenance_event_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_simulate_maintenance_event") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_simulate_maintenance_event") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SimulateMaintenanceEventInstanceRequest.pb(compute.SimulateMaintenanceEventInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SimulateMaintenanceEventInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.simulate_maintenance_event(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_simulate_maintenance_event_rest_bad_request(transport: str = 'rest', request_type=compute.SimulateMaintenanceEventInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.simulate_maintenance_event(request) + + +def test_simulate_maintenance_event_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.simulate_maintenance_event(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent" % client.transport._host, args[1]) + + +def test_simulate_maintenance_event_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    # Passing both a request object and flattened fields must raise.
    with pytest.raises(ValueError):
        client.simulate_maintenance_event(
            compute.SimulateMaintenanceEventInstanceRequest(),
            project='project_value',
            zone='zone_value',
            instance='instance_value',
        )


def test_simulate_maintenance_event_rest_error():
    # Smoke test: a REST-transport client can be constructed for this
    # method family without raising.  No request is issued.
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.SimulateMaintenanceEventInstanceRequest,
    dict,
])
def test_simulate_maintenance_event_unary_rest(request_type):
    # Exercise the unary (non-polling) simulate_maintenance_event call path
    # against a mocked HTTP session; accepts both a proto request and a dict.
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.simulate_maintenance_event_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_simulate_maintenance_event_unary_rest_required_fields(request_type=compute.SimulateMaintenanceEventInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).simulate_maintenance_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).simulate_maintenance_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.simulate_maintenance_event_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_simulate_maintenance_event_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.simulate_maintenance_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_simulate_maintenance_event_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_simulate_maintenance_event") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_simulate_maintenance_event") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SimulateMaintenanceEventInstanceRequest.pb(compute.SimulateMaintenanceEventInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SimulateMaintenanceEventInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.simulate_maintenance_event_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_simulate_maintenance_event_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SimulateMaintenanceEventInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.simulate_maintenance_event_unary(request) + + +def test_simulate_maintenance_event_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.simulate_maintenance_event_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent" % client.transport._host, args[1]) + + +def test_simulate_maintenance_event_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.simulate_maintenance_event_unary( + compute.SimulateMaintenanceEventInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_simulate_maintenance_event_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StartInstanceRequest, + dict, +]) +def test_start_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.start(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_start_rest_required_fields(request_type=compute.StartInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.start(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_start_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.start._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_start") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_start") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StartInstanceRequest.pb(compute.StartInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StartInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.start(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_rest_bad_request(transport: str = 'rest', request_type=compute.StartInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start(request) + + +def test_start_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.start(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/start" % client.transport._host, args[1]) + + +def test_start_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.start( + compute.StartInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_start_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StartInstanceRequest, + dict, +]) +def test_start_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.start_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_start_unary_rest_required_fields(request_type=compute.StartInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.start_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_start_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.start._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_start") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_start") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StartInstanceRequest.pb(compute.StartInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StartInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.start_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StartInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_unary(request) + + +def test_start_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.start_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/start" % client.transport._host, args[1]) + + +def test_start_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.start_unary( + compute.StartInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_start_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StartWithEncryptionKeyInstanceRequest, + dict, +]) +def test_start_with_encryption_key_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_start_with_encryption_key_request_resource"] = {'disks': [{'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'source': 'source_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.start_with_encryption_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_start_with_encryption_key_rest_required_fields(request_type=compute.StartWithEncryptionKeyInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_with_encryption_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now 
present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_with_encryption_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.start_with_encryption_key(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_start_with_encryption_key_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.start_with_encryption_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesStartWithEncryptionKeyRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_with_encryption_key_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_start_with_encryption_key") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_start_with_encryption_key") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StartWithEncryptionKeyInstanceRequest.pb(compute.StartWithEncryptionKeyInstanceRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StartWithEncryptionKeyInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.start_with_encryption_key(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_with_encryption_key_rest_bad_request(transport: str = 'rest', request_type=compute.StartWithEncryptionKeyInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_start_with_encryption_key_request_resource"] = {'disks': [{'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'source': 'source_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_with_encryption_key(request) + + +def test_start_with_encryption_key_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_start_with_encryption_key_request_resource=compute.InstancesStartWithEncryptionKeyRequest(disks=[compute.CustomerEncryptionKeyProtectedDisk(disk_encryption_key=compute.CustomerEncryptionKey(kms_key_name='kms_key_name_value'))]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.start_with_encryption_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey" % client.transport._host, args[1]) + + +def test_start_with_encryption_key_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_with_encryption_key( + compute.StartWithEncryptionKeyInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_start_with_encryption_key_request_resource=compute.InstancesStartWithEncryptionKeyRequest(disks=[compute.CustomerEncryptionKeyProtectedDisk(disk_encryption_key=compute.CustomerEncryptionKey(kms_key_name='kms_key_name_value'))]), + ) + + +def test_start_with_encryption_key_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StartWithEncryptionKeyInstanceRequest, + dict, +]) +def test_start_with_encryption_key_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_start_with_encryption_key_request_resource"] = {'disks': [{'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'source': 'source_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.start_with_encryption_key_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_start_with_encryption_key_unary_rest_required_fields(request_type=compute.StartWithEncryptionKeyInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_with_encryption_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_with_encryption_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.start_with_encryption_key_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_start_with_encryption_key_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.start_with_encryption_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "instancesStartWithEncryptionKeyRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_with_encryption_key_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = 
InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_start_with_encryption_key") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_start_with_encryption_key") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StartWithEncryptionKeyInstanceRequest.pb(compute.StartWithEncryptionKeyInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StartWithEncryptionKeyInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.start_with_encryption_key_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_with_encryption_key_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StartWithEncryptionKeyInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instances_start_with_encryption_key_request_resource"] = {'disks': [{'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'source': 'source_value'}]} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_with_encryption_key_unary(request) + + +def test_start_with_encryption_key_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instances_start_with_encryption_key_request_resource=compute.InstancesStartWithEncryptionKeyRequest(disks=[compute.CustomerEncryptionKeyProtectedDisk(disk_encryption_key=compute.CustomerEncryptionKey(kms_key_name='kms_key_name_value'))]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.start_with_encryption_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey" % client.transport._host, args[1]) + + +def test_start_with_encryption_key_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_with_encryption_key_unary( + compute.StartWithEncryptionKeyInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instances_start_with_encryption_key_request_resource=compute.InstancesStartWithEncryptionKeyRequest(disks=[compute.CustomerEncryptionKeyProtectedDisk(disk_encryption_key=compute.CustomerEncryptionKey(kms_key_name='kms_key_name_value'))]), + ) + + +def test_start_with_encryption_key_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopInstanceRequest, + dict, +]) +def test_stop_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_stop_rest_required_fields(request_type=compute.StopInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("discard_local_ssd", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop._get_unset_required_fields({}) + assert set(unset_fields) == (set(("discardLocalSsd", "requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_stop") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_stop") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopInstanceRequest.pb(compute.StopInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_rest_bad_request(transport: str = 'rest', request_type=compute.StopInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop(request) + + +def test_stop_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/stop" % client.transport._host, args[1]) + + +def test_stop_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop( + compute.StopInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_stop_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopInstanceRequest, + dict, +]) +def test_stop_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + 
response = client.stop_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_stop_unary_rest_required_fields(request_type=compute.StopInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("discard_local_ssd", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop._get_unset_required_fields({}) + assert set(unset_fields) == (set(("discardLocalSsd", "requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_stop") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_stop") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopInstanceRequest.pb(compute.StopInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StopInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_unary(request) + + +def test_stop_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/stop" % client.transport._host, args[1]) + + +def test_stop_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop_unary( + compute.StopInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_stop_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SuspendInstanceRequest, + dict, +]) +def test_suspend_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.suspend(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_suspend_rest_required_fields(request_type=compute.SuspendInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).suspend._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) 
+ + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).suspend._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("discard_local_ssd", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.suspend(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_suspend_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.suspend._get_unset_required_fields({}) + assert set(unset_fields) == (set(("discardLocalSsd", "requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_suspend_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_suspend") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_suspend") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SuspendInstanceRequest.pb(compute.SuspendInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SuspendInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.suspend(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_suspend_rest_bad_request(transport: str = 'rest', request_type=compute.SuspendInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.suspend(request) + + +def test_suspend_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.suspend(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/suspend" % client.transport._host, args[1]) + + +def test_suspend_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.suspend( + compute.SuspendInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_suspend_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SuspendInstanceRequest, + dict, +]) +def test_suspend_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.suspend_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_suspend_unary_rest_required_fields(request_type=compute.SuspendInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).suspend._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).suspend._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("discard_local_ssd", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.suspend_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_suspend_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.suspend._get_unset_required_fields({}) + assert set(unset_fields) == (set(("discardLocalSsd", "requestId", )) & set(("instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_suspend_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_suspend") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_suspend") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SuspendInstanceRequest.pb(compute.SuspendInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SuspendInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.suspend_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_suspend_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SuspendInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.suspend_unary(request) + + +def test_suspend_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.suspend_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/suspend" % client.transport._host, args[1]) + + +def test_suspend_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.suspend_unary( + compute.SuspendInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + ) + + +def test_suspend_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsInstanceRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InstancesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsInstanceRequest.pb(compute.TestIamPermissionsInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsInstanceRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateInstanceRequest, + dict, +]) +def test_update_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instance_resource"] = {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'cpu_platform': 'cpu_platform_value', 'creation_timestamp': 'creation_timestamp_value', 'deletion_protection': True, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 
'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'display_device': {'enable_display': True}, 'fingerprint': 'fingerprint_value', 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'hostname': 'hostname_value', 'id': 205, 'instance_encryption_key': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_start_timestamp': 'last_start_timestamp_value', 'last_stop_timestamp': 'last_stop_timestamp_value', 'last_suspended_timestamp': 'last_suspended_timestamp_value', 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 
'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'name': 'name_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'params': {'resource_manager_tags': {}}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'physical_host': 'physical_host_value'}, 'satisfies_pzs': True, 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 
'provisioning_model_value'}, 'self_link': 'self_link_value', 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'shielded_instance_integrity_policy': {'update_auto_learn_policy': True}, 'source_machine_image': 'source_machine_image_value', 'source_machine_image_encryption_key': {}, 'start_restricted': True, 'status': 'status_value', 'status_message': 'status_message_value', 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}, 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.update(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields 
with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("minimal_action", "most_disruptive_allowed_action", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("minimalAction", "mostDisruptiveAllowedAction", "requestId", )) & set(("instance", "instanceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateInstanceRequest.pb(compute.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instance_resource"] = {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'cpu_platform': 'cpu_platform_value', 'creation_timestamp': 'creation_timestamp_value', 'deletion_protection': True, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 
'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'display_device': {'enable_display': True}, 'fingerprint': 'fingerprint_value', 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'hostname': 'hostname_value', 'id': 205, 'instance_encryption_key': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_start_timestamp': 'last_start_timestamp_value', 'last_stop_timestamp': 'last_stop_timestamp_value', 'last_suspended_timestamp': 'last_suspended_timestamp_value', 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'name': 'name_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 
'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'params': {'resource_manager_tags': {}}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'physical_host': 'physical_host_value'}, 'satisfies_pzs': True, 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'self_link': 'self_link_value', 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'shielded_instance_integrity_policy': {'update_auto_learn_policy': True}, 'source_machine_image': 'source_machine_image_value', 'source_machine_image_encryption_key': {}, 'start_restricted': True, 'status': 'status_value', 
'status_message': 'status_message_value', 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}, 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instance_resource=compute.Instance(advanced_machine_features=compute.AdvancedMachineFeatures(enable_nested_virtualization=True)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update( + compute.UpdateInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instance_resource=compute.Instance(advanced_machine_features=compute.AdvancedMachineFeatures(enable_nested_virtualization=True)), + ) + + +def test_update_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateInstanceRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instance_resource"] = {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'cpu_platform': 'cpu_platform_value', 'creation_timestamp': 'creation_timestamp_value', 'deletion_protection': True, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 
'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'display_device': {'enable_display': True}, 'fingerprint': 'fingerprint_value', 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'hostname': 'hostname_value', 'id': 205, 'instance_encryption_key': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_start_timestamp': 'last_start_timestamp_value', 'last_stop_timestamp': 'last_stop_timestamp_value', 'last_suspended_timestamp': 'last_suspended_timestamp_value', 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 
'min_cpu_platform_value', 'name': 'name_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'params': {'resource_manager_tags': {}}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'physical_host': 'physical_host_value'}, 'satisfies_pzs': True, 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'self_link': 
'self_link_value', 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'shielded_instance_integrity_policy': {'update_auto_learn_policy': True}, 'source_machine_image': 'source_machine_image_value', 'source_machine_image_encryption_key': {}, 'start_restricted': True, 'status': 'status_value', 'status_message': 'status_message_value', 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}, 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the 
response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("minimal_action", "most_disruptive_allowed_action", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("minimalAction", "mostDisruptiveAllowedAction", "requestId", )) & set(("instance", "instanceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateInstanceRequest.pb(compute.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["instance_resource"] = {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'cpu_platform': 'cpu_platform_value', 'creation_timestamp': 'creation_timestamp_value', 'deletion_protection': True, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 
'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'display_device': {'enable_display': True}, 'fingerprint': 'fingerprint_value', 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'hostname': 'hostname_value', 'id': 205, 'instance_encryption_key': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_start_timestamp': 'last_start_timestamp_value', 'last_stop_timestamp': 'last_stop_timestamp_value', 'last_suspended_timestamp': 'last_suspended_timestamp_value', 'machine_type': 
'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'name': 'name_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'params': {'resource_manager_tags': {}}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'physical_host': 'physical_host_value'}, 'satisfies_pzs': True, 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 
'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'self_link': 'self_link_value', 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'shielded_instance_integrity_policy': {'update_auto_learn_policy': True}, 'source_machine_image': 'source_machine_image_value', 'source_machine_image_encryption_key': {}, 'start_restricted': True, 'status': 'status_value', 'status_message': 'status_message_value', 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}, 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + instance_resource=compute.Instance(advanced_machine_features=compute.AdvancedMachineFeatures(enable_nested_virtualization=True)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_unary( + compute.UpdateInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + instance_resource=compute.Instance(advanced_machine_features=compute.AdvancedMachineFeatures(enable_nested_virtualization=True)), + ) + + +def test_update_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateAccessConfigInstanceRequest, + dict, +]) +def test_update_access_config_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["access_config_resource"] = {'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_access_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_access_config_rest_required_fields(request_type=compute.UpdateAccessConfigInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["network_interface"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "networkInterface" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_access_config._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == request_init["network_interface"] + + jsonified_request["instance"] = 'instance_value' + jsonified_request["networkInterface"] = 'network_interface_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_access_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("network_interface", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_access_config(request) + + expected_params = [ + ( + "networkInterface", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_access_config_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_access_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("networkInterface", "requestId", )) & set(("accessConfigResource", "instance", "networkInterface", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_access_config_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update_access_config") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_update_access_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateAccessConfigInstanceRequest.pb(compute.UpdateAccessConfigInstanceRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateAccessConfigInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_access_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_access_config_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateAccessConfigInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["access_config_resource"] = {'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_access_config(request) + + +def test_update_access_config_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + access_config_resource=compute.AccessConfig(external_ipv6='external_ipv6_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_access_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig" % client.transport._host, args[1]) + + +def test_update_access_config_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_access_config( + compute.UpdateAccessConfigInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + access_config_resource=compute.AccessConfig(external_ipv6='external_ipv6_value'), + ) + + +def test_update_access_config_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateAccessConfigInstanceRequest, + dict, +]) +def test_update_access_config_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["access_config_resource"] = {'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_access_config_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_access_config_unary_rest_required_fields(request_type=compute.UpdateAccessConfigInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["network_interface"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "networkInterface" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_access_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == request_init["network_interface"] + + jsonified_request["instance"] = 'instance_value' + jsonified_request["networkInterface"] = 'network_interface_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_access_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("network_interface", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_access_config_unary(request) + + expected_params = [ + ( + "networkInterface", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_access_config_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_access_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("networkInterface", "requestId", )) & set(("accessConfigResource", "instance", "networkInterface", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_access_config_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update_access_config") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_update_access_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.UpdateAccessConfigInstanceRequest.pb(compute.UpdateAccessConfigInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateAccessConfigInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_access_config_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_access_config_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateAccessConfigInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["access_config_resource"] = {'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_access_config_unary(request) + + +def test_update_access_config_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + access_config_resource=compute.AccessConfig(external_ipv6='external_ipv6_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_access_config_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig" % client.transport._host, args[1]) + + +def test_update_access_config_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_access_config_unary( + compute.UpdateAccessConfigInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + access_config_resource=compute.AccessConfig(external_ipv6='external_ipv6_value'), + ) + + +def test_update_access_config_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateDisplayDeviceInstanceRequest, + dict, +]) +def test_update_display_device_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["display_device_resource"] = {'enable_display': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_display_device(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_display_device_rest_required_fields(request_type=compute.UpdateDisplayDeviceInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_display_device._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_display_device._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_display_device(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_display_device_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_display_device._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("displayDeviceResource", "instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_display_device_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update_display_device") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_update_display_device") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateDisplayDeviceInstanceRequest.pb(compute.UpdateDisplayDeviceInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateDisplayDeviceInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_display_device(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_display_device_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateDisplayDeviceInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["display_device_resource"] = {'enable_display': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_display_device(request) + + +def test_update_display_device_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + display_device_resource=compute.DisplayDevice(enable_display=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_display_device(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateDisplayDevice" % client.transport._host, args[1]) + + +def test_update_display_device_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_display_device( + compute.UpdateDisplayDeviceInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + display_device_resource=compute.DisplayDevice(enable_display=True), + ) + + +def test_update_display_device_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateDisplayDeviceInstanceRequest, + dict, +]) +def test_update_display_device_unary_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["display_device_resource"] = {'enable_display': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_display_device_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_display_device_unary_rest_required_fields(request_type=compute.UpdateDisplayDeviceInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_display_device._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_display_device._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_display_device_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_display_device_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_display_device._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("displayDeviceResource", "instance", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_display_device_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update_display_device") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_update_display_device") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateDisplayDeviceInstanceRequest.pb(compute.UpdateDisplayDeviceInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateDisplayDeviceInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_display_device_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_display_device_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateDisplayDeviceInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["display_device_resource"] = {'enable_display': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_display_device_unary(request) + + +def test_update_display_device_unary_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + display_device_resource=compute.DisplayDevice(enable_display=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_display_device_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateDisplayDevice" % client.transport._host, args[1]) + + +def test_update_display_device_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_display_device_unary( + compute.UpdateDisplayDeviceInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + display_device_resource=compute.DisplayDevice(enable_display=True), + ) + + +def test_update_display_device_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateNetworkInterfaceInstanceRequest, + dict, +]) +def test_update_network_interface_rest(request_type): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["network_interface_resource"] = {'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 
'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_network_interface(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_network_interface_rest_required_fields(request_type=compute.UpdateNetworkInterfaceInstanceRequest): + transport_class = transports.InstancesRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["network_interface"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "networkInterface" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_network_interface._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == request_init["network_interface"] + + jsonified_request["instance"] = 'instance_value' + jsonified_request["networkInterface"] = 'network_interface_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_network_interface._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("network_interface", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_network_interface(request) + + expected_params = [ + ( + "networkInterface", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_network_interface_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_network_interface._get_unset_required_fields({}) + assert set(unset_fields) == (set(("networkInterface", "requestId", )) & set(("instance", "networkInterface", "networkInterfaceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_network_interface_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update_network_interface") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, 
"pre_update_network_interface") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateNetworkInterfaceInstanceRequest.pb(compute.UpdateNetworkInterfaceInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateNetworkInterfaceInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_network_interface(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_network_interface_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateNetworkInterfaceInstanceRequest): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + request_init["network_interface_resource"] = {'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 
'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_network_interface(request) + + +def test_update_network_interface_rest_flattened(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + instance='instance_value', + network_interface='network_interface_value', + network_interface_resource=compute.NetworkInterface(access_configs=[compute.AccessConfig(external_ipv6='external_ipv6_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_network_interface(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateNetworkInterface" % client.transport._host, args[1]) + + +def test_update_network_interface_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
@pytest.mark.parametrize("request_type", [
    compute.UpdateNetworkInterfaceInstanceRequest,
    dict,
])
def test_update_network_interface_unary_rest(request_type):
    """The unary REST call must deserialize the HTTP body into a compute.Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Build a request that satisfies transcoding: URL path params plus a
    # fully populated network-interface resource for the request body.
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request_init["network_interface_resource"] = {'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}
    request = request_type(**request_init)

    # Scalar fields the fake server reports back on the Operation payload.
    operation_kwargs = dict(
        client_operation_id='client_operation_id_value',
        creation_timestamp='creation_timestamp_value',
        description='description_value',
        end_time='end_time_value',
        http_error_message='http_error_message_value',
        http_error_status_code=2374,
        id=205,
        insert_time='insert_time_value',
        kind='kind_value',
        name='name_value',
        operation_group_id='operation_group_id_value',
        operation_type='operation_type_value',
        progress=885,
        region='region_value',
        self_link='self_link_value',
        start_time='start_time_value',
        status=compute.Operation.Status.DONE,
        status_message='status_message_value',
        target_id=947,
        target_link='target_link_value',
        user='user_value',
        zone='zone_value',
    )

    # Mock the http request call within the method and fake a 200 response
    # whose body is the JSON-serialized Operation.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        faked = compute.Operation(**operation_kwargs)
        http_response = Response()
        http_response.status_code = 200
        pb_payload = compute.Operation.pb(faked)
        http_response._content = json_format.MessageToJson(pb_payload).encode('UTF-8')
        req.return_value = http_response

        response = client.update_network_interface_unary(request)

    # The unary surface returns the raw Operation (no ExtendedOperation wrap).
    assert isinstance(response, compute.Operation)
+ assert not set(unset_fields) - set(("network_interface", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "networkInterface" in jsonified_request + assert jsonified_request["networkInterface"] == 'network_interface_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_network_interface_unary(request) + + expected_params = [ + ( + "networkInterface", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_network_interface_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_network_interface._get_unset_required_fields({}) + assert set(unset_fields) == (set(("networkInterface", "requestId", )) & set(("instance", "networkInterface", "networkInterfaceResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_network_interface_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update_network_interface") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_update_network_interface") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
def test_update_network_interface_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateNetworkInterfaceInstanceRequest):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request_init["network_interface_resource"] = {'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}
    request = request_type(**request_init)

    # Prepare the fake 400 response up front, then expect BadRequest.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, 'request') as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.update_network_interface_unary(request)
@pytest.mark.parametrize("request_type", [
    compute.UpdateShieldedInstanceConfigInstanceRequest,
    dict,
])
def test_update_shielded_instance_config_rest(request_type):
    """REST call returns an ExtendedOperation whose fields mirror the payload."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # A request that satisfies transcoding: path params plus resource body.
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request_init["shielded_instance_config_resource"] = {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}
    request = request_type(**request_init)

    # Every scalar field the fake server reports back on the Operation;
    # also used below to verify the round-trip.
    expected = {
        'client_operation_id': 'client_operation_id_value',
        'creation_timestamp': 'creation_timestamp_value',
        'description': 'description_value',
        'end_time': 'end_time_value',
        'http_error_message': 'http_error_message_value',
        'http_error_status_code': 2374,
        'id': 205,
        'insert_time': 'insert_time_value',
        'kind': 'kind_value',
        'name': 'name_value',
        'operation_group_id': 'operation_group_id_value',
        'operation_type': 'operation_type_value',
        'progress': 885,
        'region': 'region_value',
        'self_link': 'self_link_value',
        'start_time': 'start_time_value',
        'status': compute.Operation.Status.DONE,
        'status_message': 'status_message_value',
        'target_id': 947,
        'target_link': 'target_link_value',
        'user': 'user_value',
        'zone': 'zone_value',
    }

    # Mock the http request call within the method and fake a 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        faked = compute.Operation(**expected)
        http_response = Response()
        http_response.status_code = 200
        http_response._content = json_format.MessageToJson(
            compute.Operation.pb(faked)).encode('UTF-8')
        req.return_value = http_response

        response = client.update_shielded_instance_config(request)

    # The non-unary surface wraps the Operation in an ExtendedOperation
    # facade; every field must round-trip unchanged.
    assert isinstance(response, extended_operation.ExtendedOperation)
    for field, want in expected.items():
        assert getattr(response, field) == want
values are now present + + jsonified_request["instance"] = 'instance_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_shielded_instance_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == 'instance_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_shielded_instance_config(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_shielded_instance_config_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_shielded_instance_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "shieldedInstanceConfigResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_shielded_instance_config_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = InstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstancesRestInterceptor, "post_update_shielded_instance_config") as post, \ + mock.patch.object(transports.InstancesRestInterceptor, "pre_update_shielded_instance_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
def test_update_shielded_instance_config_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateShieldedInstanceConfigInstanceRequest):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request_init["shielded_instance_config_resource"] = {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}
    request = request_type(**request_init)

    # Prepare the fake 400 response up front, then expect BadRequest.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, 'request') as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.update_shielded_instance_config(request)
@pytest.mark.parametrize("request_type", [
    compute.UpdateShieldedInstanceConfigInstanceRequest,
    dict,
])
def test_update_shielded_instance_config_unary_rest(request_type):
    """The unary REST call must deserialize the HTTP body into a compute.Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # A request that satisfies transcoding: path params plus resource body.
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request_init["shielded_instance_config_resource"] = {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}
    request = request_type(**request_init)

    # Scalar fields the fake server reports back on the Operation payload.
    operation_kwargs = dict(
        client_operation_id='client_operation_id_value',
        creation_timestamp='creation_timestamp_value',
        description='description_value',
        end_time='end_time_value',
        http_error_message='http_error_message_value',
        http_error_status_code=2374,
        id=205,
        insert_time='insert_time_value',
        kind='kind_value',
        name='name_value',
        operation_group_id='operation_group_id_value',
        operation_type='operation_type_value',
        progress=885,
        region='region_value',
        self_link='self_link_value',
        start_time='start_time_value',
        status=compute.Operation.Status.DONE,
        status_message='status_message_value',
        target_id=947,
        target_link='target_link_value',
        user='user_value',
        zone='zone_value',
    )

    # Mock the http request call within the method and fake a 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        faked = compute.Operation(**operation_kwargs)
        http_response = Response()
        http_response.status_code = 200
        http_response._content = json_format.MessageToJson(
            compute.Operation.pb(faked)).encode('UTF-8')
        req.return_value = http_response

        response = client.update_shielded_instance_config_unary(request)

    # The unary surface returns the raw Operation (no ExtendedOperation wrap).
    assert isinstance(response, compute.Operation)
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_shielded_instance_config_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_shielded_instance_config_unary_rest_unset_required_fields(): + transport = transports.InstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_shielded_instance_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instance", "project", "shieldedInstanceConfigResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_shielded_instance_config_unary_rest_interceptors(null_interceptor): + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstancesRestInterceptor(), + ) + client = 
def test_update_shielded_instance_config_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateShieldedInstanceConfigInstanceRequest):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'instance': 'sample3'}
    request_init["shielded_instance_config_resource"] = {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}
    request = request_type(**request_init)

    # Prepare the fake 400 response up front, then expect BadRequest.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, 'request') as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.update_shielded_instance_config_unary(request)
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateShieldedInstanceConfig" % client.transport._host, args[1]) + + +def test_update_shielded_instance_config_unary_rest_flattened_error(transport: str = 'rest'): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_shielded_instance_config_unary( + compute.UpdateShieldedInstanceConfigInstanceRequest(), + project='project_value', + zone='zone_value', + instance='instance_value', + shielded_instance_config_resource=compute.ShieldedInstanceConfig(enable_integrity_monitoring=True), + ) + + +def test_update_shielded_instance_config_unary_rest_error(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstancesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstancesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstancesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstancesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InstancesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.InstancesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = InstancesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_instances_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstancesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_instances_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.instances.transports.InstancesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InstancesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_access_config', + 'add_resource_policies', + 'aggregated_list', + 'attach_disk', + 'bulk_insert', + 'delete', + 'delete_access_config', + 'detach_disk', + 'get', + 'get_effective_firewalls', + 'get_guest_attributes', + 'get_iam_policy', + 'get_screenshot', + 'get_serial_port_output', + 'get_shielded_instance_identity', + 'insert', + 'list', + 'list_referrers', + 'remove_resource_policies', + 'reset', + 'resume', + 'send_diagnostic_interrupt', + 'set_deletion_protection', + 'set_disk_auto_delete', + 'set_iam_policy', + 'set_labels', + 'set_machine_resources', + 'set_machine_type', + 'set_metadata', + 'set_min_cpu_platform', + 'set_name', + 'set_scheduling', + 'set_service_account', + 'set_shielded_instance_integrity_policy', + 'set_tags', + 'simulate_maintenance_event', + 'start', + 'start_with_encryption_key', + 'stop', + 'suspend', + 'test_iam_permissions', + 'update', + 'update_access_config', + 'update_display_device', + 'update_network_interface', + 'update_shielded_instance_config', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instances_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.instances.transports.InstancesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstancesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + 
load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_instances_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.instances.transports.InstancesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstancesTransport() + adc.assert_called_once() + + +def test_instances_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstancesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_instances_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.InstancesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instances_host_no_port(transport_name): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 
'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instances_host_with_port(transport_name): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instances_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InstancesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InstancesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_access_config._session + session2 = client2.transport.add_access_config._session + assert session1 != session2 + session1 = client1.transport.add_resource_policies._session + session2 = client2.transport.add_resource_policies._session + assert session1 != session2 + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.attach_disk._session + session2 = client2.transport.attach_disk._session + assert session1 != session2 + session1 = client1.transport.bulk_insert._session + session2 = client2.transport.bulk_insert._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.delete_access_config._session + session2 = client2.transport.delete_access_config._session + assert session1 != session2 + session1 
= client1.transport.detach_disk._session + session2 = client2.transport.detach_disk._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_effective_firewalls._session + session2 = client2.transport.get_effective_firewalls._session + assert session1 != session2 + session1 = client1.transport.get_guest_attributes._session + session2 = client2.transport.get_guest_attributes._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.get_screenshot._session + session2 = client2.transport.get_screenshot._session + assert session1 != session2 + session1 = client1.transport.get_serial_port_output._session + session2 = client2.transport.get_serial_port_output._session + assert session1 != session2 + session1 = client1.transport.get_shielded_instance_identity._session + session2 = client2.transport.get_shielded_instance_identity._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_referrers._session + session2 = client2.transport.list_referrers._session + assert session1 != session2 + session1 = client1.transport.remove_resource_policies._session + session2 = client2.transport.remove_resource_policies._session + assert session1 != session2 + session1 = client1.transport.reset._session + session2 = client2.transport.reset._session + assert session1 != session2 + session1 = client1.transport.resume._session + session2 = client2.transport.resume._session + assert session1 != session2 + session1 = 
client1.transport.send_diagnostic_interrupt._session + session2 = client2.transport.send_diagnostic_interrupt._session + assert session1 != session2 + session1 = client1.transport.set_deletion_protection._session + session2 = client2.transport.set_deletion_protection._session + assert session1 != session2 + session1 = client1.transport.set_disk_auto_delete._session + session2 = client2.transport.set_disk_auto_delete._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.set_machine_resources._session + session2 = client2.transport.set_machine_resources._session + assert session1 != session2 + session1 = client1.transport.set_machine_type._session + session2 = client2.transport.set_machine_type._session + assert session1 != session2 + session1 = client1.transport.set_metadata._session + session2 = client2.transport.set_metadata._session + assert session1 != session2 + session1 = client1.transport.set_min_cpu_platform._session + session2 = client2.transport.set_min_cpu_platform._session + assert session1 != session2 + session1 = client1.transport.set_name._session + session2 = client2.transport.set_name._session + assert session1 != session2 + session1 = client1.transport.set_scheduling._session + session2 = client2.transport.set_scheduling._session + assert session1 != session2 + session1 = client1.transport.set_service_account._session + session2 = client2.transport.set_service_account._session + assert session1 != session2 + session1 = client1.transport.set_shielded_instance_integrity_policy._session + session2 = client2.transport.set_shielded_instance_integrity_policy._session + assert session1 != session2 + session1 = client1.transport.set_tags._session + session2 = 
client2.transport.set_tags._session + assert session1 != session2 + session1 = client1.transport.simulate_maintenance_event._session + session2 = client2.transport.simulate_maintenance_event._session + assert session1 != session2 + session1 = client1.transport.start._session + session2 = client2.transport.start._session + assert session1 != session2 + session1 = client1.transport.start_with_encryption_key._session + session2 = client2.transport.start_with_encryption_key._session + assert session1 != session2 + session1 = client1.transport.stop._session + session2 = client2.transport.stop._session + assert session1 != session2 + session1 = client1.transport.suspend._session + session2 = client2.transport.suspend._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + session1 = client1.transport.update_access_config._session + session2 = client2.transport.update_access_config._session + assert session1 != session2 + session1 = client1.transport.update_display_device._session + session2 = client2.transport.update_display_device._session + assert session1 != session2 + session1 = client1.transport.update_network_interface._session + session2 = client2.transport.update_network_interface._session + assert session1 != session2 + session1 = client1.transport.update_shielded_instance_config._session + session2 = client2.transport.update_shielded_instance_config._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = InstancesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected 
= { + "billing_account": "clam", + } + path = InstancesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = InstancesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = InstancesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InstancesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InstancesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InstancesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InstancesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InstancesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = InstancesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InstancesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstancesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InstancesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InstancesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = InstancesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InstancesTransport, '_prep_wrapped_messages') as prep: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InstancesTransport, '_prep_wrapped_messages') as prep: + transport_class = InstancesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InstancesClient, transports.InstancesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_attachments.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_attachments.py new file mode 100644 index 000000000..1366fd231 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_attachments.py @@ -0,0 +1,4012 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.interconnect_attachments import InterconnectAttachmentsClient +from google.cloud.compute_v1.services.interconnect_attachments import pagers +from google.cloud.compute_v1.services.interconnect_attachments import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InterconnectAttachmentsClient._get_default_mtls_endpoint(None) is None + assert InterconnectAttachmentsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InterconnectAttachmentsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InterconnectAttachmentsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InterconnectAttachmentsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InterconnectAttachmentsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InterconnectAttachmentsClient, "rest"), +]) +def test_interconnect_attachments_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.InterconnectAttachmentsRestTransport, "rest"), +]) +def 
test_interconnect_attachments_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InterconnectAttachmentsClient, "rest"), +]) +def test_interconnect_attachments_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_interconnect_attachments_client_get_transport_class(): + transport = InterconnectAttachmentsClient.get_transport_class() + available_transports = [ + transports.InterconnectAttachmentsRestTransport, + ] + assert transport in available_transports + + transport = InterconnectAttachmentsClient.get_transport_class("rest") + assert transport == transports.InterconnectAttachmentsRestTransport + + 
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InterconnectAttachmentsClient, transports.InterconnectAttachmentsRestTransport, "rest"), +]) +@mock.patch.object(InterconnectAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectAttachmentsClient)) +def test_interconnect_attachments_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InterconnectAttachmentsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InterconnectAttachmentsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (InterconnectAttachmentsClient, transports.InterconnectAttachmentsRestTransport, "rest", "true"),
+    (InterconnectAttachmentsClient, transports.InterconnectAttachmentsRestTransport, "rest", "false"),
+])
+@mock.patch.object(InterconnectAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectAttachmentsClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_interconnect_attachments_client_mtls_env_auto(client_class, transport_class, transport_name, 
use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InterconnectAttachmentsClient +]) +@mock.patch.object(InterconnectAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectAttachmentsClient)) +def test_interconnect_attachments_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InterconnectAttachmentsClient, transports.InterconnectAttachmentsRestTransport, "rest"), +]) +def test_interconnect_attachments_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InterconnectAttachmentsClient, transports.InterconnectAttachmentsRestTransport, "rest", None), +]) +def test_interconnect_attachments_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListInterconnectAttachmentsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectAttachmentAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListInterconnectAttachmentsRequest): + transport_class = transports.InterconnectAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.InterconnectAttachmentAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InterconnectAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(), + ) + client = 
InterconnectAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListInterconnectAttachmentsRequest.pb(compute.AggregatedListInterconnectAttachmentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InterconnectAttachmentAggregatedList.to_json(compute.InterconnectAttachmentAggregatedList()) + + request = compute.AggregatedListInterconnectAttachmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InterconnectAttachmentAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListInterconnectAttachmentsRequest): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectAttachmentAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/interconnectAttachments" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListInterconnectAttachmentsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InterconnectAttachmentAggregatedList( + items={ + 'a':compute.InterconnectAttachmentsScopedList(), + 'b':compute.InterconnectAttachmentsScopedList(), + 'c':compute.InterconnectAttachmentsScopedList(), + }, + next_page_token='abc', + ), + compute.InterconnectAttachmentAggregatedList( + items={}, + next_page_token='def', + ), + compute.InterconnectAttachmentAggregatedList( + items={ + 'g':compute.InterconnectAttachmentsScopedList(), + }, + next_page_token='ghi', + ), + compute.InterconnectAttachmentAggregatedList( + items={ + 'h':compute.InterconnectAttachmentsScopedList(), + 'i':compute.InterconnectAttachmentsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InterconnectAttachmentAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.InterconnectAttachmentsScopedList) + assert pager.get('h') is None + + results = list(pager) 
+ assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.InterconnectAttachmentsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.InterconnectAttachmentsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInterconnectAttachmentRequest, + dict, +]) +def test_delete_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteInterconnectAttachmentRequest): + transport_class = transports.InterconnectAttachmentsRestTransport + + request_init = {} + request_init["interconnect_attachment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["interconnectAttachment"] = 'interconnect_attachment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnectAttachment" in jsonified_request + assert jsonified_request["interconnectAttachment"] == 'interconnect_attachment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnectAttachment", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(), + ) + client = InterconnectAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInterconnectAttachmentRequest.pb(compute.DeleteInterconnectAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInterconnectAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInterconnectAttachmentRequest): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + interconnect_attachment='interconnect_attachment_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteInterconnectAttachmentRequest(), + project='project_value', + region='region_value', + interconnect_attachment='interconnect_attachment_value', + ) + + +def test_delete_rest_error(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInterconnectAttachmentRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
        # Every scalar field of Operation is populated so the JSON round-trip
        # through the mocked HTTP response exercises full (de)serialization.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_delete_unary_rest_required_fields(request_type=compute.DeleteInterconnectAttachmentRequest):
    """Required fields of the delete request must survive transcoding; defaults are dropped."""
    transport_class = transports.InterconnectAttachmentsRestTransport

    request_init = {}
    request_init["interconnect_attachment"] = ""
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["interconnectAttachment"] = 'interconnect_attachment_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "interconnectAttachment" in jsonified_request
    assert jsonified_request["interconnectAttachment"] == 'interconnect_attachment_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete_unary(request)

            # No required query parameters remain for this call.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_delete_unary_rest_unset_required_fields():
    """The delete method's unset-required-fields set is (query params) ∩ (required fields)."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance
    # (no parentheses) — generator-emitted quirk; confirm it is intentional.
    transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.delete._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("interconnectAttachment", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_unary_rest_interceptors(null_interceptor):
    """pre_delete/post_delete interceptor hooks are each invoked exactly once around delete_unary."""
    transport = transports.InterconnectAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(),
        )
    client = InterconnectAttachmentsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_delete") as post, \
         mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_delete") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.DeleteInterconnectAttachmentRequest.pb(compute.DeleteInterconnectAttachmentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.DeleteInterconnectAttachmentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInterconnectAttachmentRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete_unary(request)


def test_delete_unary_rest_flattened():
    """Flattened keyword arguments to delete_unary are expanded onto the REST URL path."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            interconnect_attachment='interconnect_attachment_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.delete_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}" % client.transport._host, args[1])


def test_delete_unary_rest_flattened_error(transport: str = 'rest'):
    """Calling delete_unary() with both a request object and flattened fields raises ValueError."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_unary(
            compute.DeleteInterconnectAttachmentRequest(),
            project='project_value',
            region='region_value',
            interconnect_attachment='interconnect_attachment_value',
        )


def test_delete_unary_rest_error():
    """Generated smoke test: a REST-transport client can be constructed for delete_unary."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.GetInterconnectAttachmentRequest,
    dict,
])
def test_get_rest(request_type):
    """get over REST returns a compute.InterconnectAttachment with all fields round-tripped."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        # Every scalar field of InterconnectAttachment is populated so the
        # JSON round-trip exercises full (de)serialization.
        return_value = compute.InterconnectAttachment(
            admin_enabled=True,
            bandwidth='bandwidth_value',
            candidate_ipv6_subnets=['candidate_ipv6_subnets_value'],
            candidate_subnets=['candidate_subnets_value'],
            cloud_router_ip_address='cloud_router_ip_address_value',
            cloud_router_ipv6_address='cloud_router_ipv6_address_value',
            cloud_router_ipv6_interface_id='cloud_router_ipv6_interface_id_value',
            creation_timestamp='creation_timestamp_value',
            customer_router_ip_address='customer_router_ip_address_value',
            customer_router_ipv6_address='customer_router_ipv6_address_value',
            customer_router_ipv6_interface_id='customer_router_ipv6_interface_id_value',
            dataplane_version=1807,
            description='description_value',
            edge_availability_domain='edge_availability_domain_value',
            encryption='encryption_value',
            google_reference_id='google_reference_id_value',
            id=205,
            interconnect='interconnect_value',
            ipsec_internal_addresses=['ipsec_internal_addresses_value'],
            kind='kind_value',
            label_fingerprint='label_fingerprint_value',
            mtu=342,
            name='name_value',
            operational_status='operational_status_value',
            pairing_key='pairing_key_value',
            partner_asn=1181,
            region='region_value',
            remote_service='remote_service_value',
            router='router_value',
            satisfies_pzs=True,
            self_link='self_link_value',
            stack_type='stack_type_value',
            state='state_value',
            subnet_length=1394,
            type_='type__value',
            vlan_tag8021q=1160,
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.InterconnectAttachment.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.InterconnectAttachment)
    assert response.admin_enabled is True
    assert response.bandwidth == 'bandwidth_value'
    assert response.candidate_ipv6_subnets == ['candidate_ipv6_subnets_value']
    assert response.candidate_subnets == ['candidate_subnets_value']
    assert response.cloud_router_ip_address == 'cloud_router_ip_address_value'
    assert response.cloud_router_ipv6_address == 'cloud_router_ipv6_address_value'
    assert response.cloud_router_ipv6_interface_id == 'cloud_router_ipv6_interface_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.customer_router_ip_address == 'customer_router_ip_address_value'
    assert response.customer_router_ipv6_address == 'customer_router_ipv6_address_value'
    assert response.customer_router_ipv6_interface_id == 'customer_router_ipv6_interface_id_value'
    assert response.dataplane_version == 1807
    assert response.description == 'description_value'
    assert response.edge_availability_domain == 'edge_availability_domain_value'
    assert response.encryption == 'encryption_value'
    assert response.google_reference_id == 'google_reference_id_value'
    assert response.id == 205
    assert response.interconnect == 'interconnect_value'
    assert response.ipsec_internal_addresses == ['ipsec_internal_addresses_value']
    assert response.kind == 'kind_value'
    assert response.label_fingerprint == 'label_fingerprint_value'
    assert response.mtu == 342
    assert response.name == 'name_value'
    assert response.operational_status == 'operational_status_value'
    assert response.pairing_key == 'pairing_key_value'
    assert response.partner_asn == 1181
    assert response.region == 'region_value'
    assert response.remote_service == 'remote_service_value'
    assert response.router == 'router_value'
    assert response.satisfies_pzs is True
    assert response.self_link == 'self_link_value'
    assert response.stack_type == 'stack_type_value'
    assert response.state == 'state_value'
    assert response.subnet_length == 1394
    assert response.type_ == 'type__value'
    assert response.vlan_tag8021q == 1160


def test_get_rest_required_fields(request_type=compute.GetInterconnectAttachmentRequest):
    """Required fields of the get request must survive transcoding; defaults are dropped."""
    transport_class = transports.InterconnectAttachmentsRestTransport

    request_init = {}
    request_init["interconnect_attachment"] = ""
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["interconnectAttachment"] = 'interconnect_attachment_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "interconnectAttachment" in jsonified_request
    assert jsonified_request["interconnectAttachment"] == 'interconnect_attachment_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.InterconnectAttachment()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.InterconnectAttachment.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get(request)

            # No required query parameters remain for this call.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_rest_unset_required_fields():
    """get has no optional query params, so the unset-required-fields set is empty."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance
    # (no parentheses) — generator-emitted quirk; confirm it is intentional.
    transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("interconnectAttachment", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rest_interceptors(null_interceptor):
    """pre_get/post_get interceptor hooks are each invoked exactly once around get()."""
    transport = transports.InterconnectAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(),
        )
    client = InterconnectAttachmentsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_get") as post, \
         mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_get") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.GetInterconnectAttachmentRequest.pb(compute.GetInterconnectAttachmentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.InterconnectAttachment.to_json(compute.InterconnectAttachment())

        request = compute.GetInterconnectAttachmentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.InterconnectAttachment()

        client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetInterconnectAttachmentRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get(request)


def test_get_rest_flattened():
    """Flattened keyword arguments to get() are expanded onto the REST URL path."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InterconnectAttachment()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            interconnect_attachment='interconnect_attachment_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.InterconnectAttachment.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}" % client.transport._host, args[1])


def test_get_rest_flattened_error(transport: str = 'rest'):
    """Calling get() with both a request object and flattened fields raises ValueError."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get(
            compute.GetInterconnectAttachmentRequest(),
            project='project_value',
            region='region_value',
            interconnect_attachment='interconnect_attachment_value',
        )


def test_get_rest_error():
    """Generated smoke test: a REST-transport client can be constructed for get."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertInterconnectAttachmentRequest,
    dict,
])
def test_insert_rest(request_type):
    """insert over REST accepts a fully-populated resource body and returns an extended operation."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["interconnect_attachment_resource"] = {'admin_enabled': True, 'bandwidth': 'bandwidth_value', 'candidate_ipv6_subnets': ['candidate_ipv6_subnets_value1', 'candidate_ipv6_subnets_value2'], 'candidate_subnets': ['candidate_subnets_value1', 'candidate_subnets_value2'], 'cloud_router_ip_address': 'cloud_router_ip_address_value', 'cloud_router_ipv6_address': 'cloud_router_ipv6_address_value', 'cloud_router_ipv6_interface_id': 'cloud_router_ipv6_interface_id_value', 'configuration_constraints': {'bgp_md5': 'bgp_md5_value', 'bgp_peer_asn_ranges': [{'max_': 421, 'min_': 419}]}, 'creation_timestamp': 'creation_timestamp_value', 'customer_router_ip_address': 'customer_router_ip_address_value', 'customer_router_ipv6_address': 'customer_router_ipv6_address_value', 'customer_router_ipv6_interface_id': 'customer_router_ipv6_interface_id_value', 'dataplane_version': 1807, 'description': 'description_value', 'edge_availability_domain': 'edge_availability_domain_value', 'encryption': 'encryption_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect': 'interconnect_value', 'ipsec_internal_addresses': ['ipsec_internal_addresses_value1', 'ipsec_internal_addresses_value2'], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'mtu': 342, 'name': 'name_value', 'operational_status': 'operational_status_value', 'pairing_key': 'pairing_key_value', 'partner_asn': 1181, 'partner_metadata': {'interconnect_name': 'interconnect_name_value', 'partner_name': 'partner_name_value', 'portal_url': 'portal_url_value'}, 'private_interconnect_info': {'tag8021q': 632}, 'region': 'region_value', 'remote_service': 'remote_service_value', 'router': 'router_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value', 'subnet_length': 1394, 'type_': 'type__value', 'vlan_tag8021q': 1160}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        # Every scalar field of Operation is populated so the JSON round-trip
        # through the mocked HTTP response exercises full (de)serialization.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert(request)

    # Establish that the response is the type that we expect.
    # insert() (non-unary) wraps the Operation in an ExtendedOperation.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_insert_rest_required_fields(request_type=compute.InsertInterconnectAttachmentRequest):
    """Required fields of the insert request must survive transcoding; defaults are dropped."""
    transport_class = transports.InterconnectAttachmentsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", "validate_only", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            # insert sends the resource in the request body.
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert(request)

            # No required query parameters remain for this call.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_rest_unset_required_fields():
    """The insert method's unset-required-fields set is (query params) ∩ (required fields)."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance
    # (no parentheses) — generator-emitted quirk; confirm it is intentional.
    transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("interconnectAttachmentResource", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_rest_interceptors(null_interceptor):
    """pre_insert/post_insert interceptor hooks are each invoked exactly once around insert()."""
    transport = transports.InterconnectAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(),
        )
    client = InterconnectAttachmentsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_insert") as post, \
         mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertInterconnectAttachmentRequest.pb(compute.InsertInterconnectAttachmentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertInterconnectAttachmentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInterconnectAttachmentRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["interconnect_attachment_resource"] = {'admin_enabled': True, 'bandwidth': 'bandwidth_value', 'candidate_ipv6_subnets': ['candidate_ipv6_subnets_value1', 'candidate_ipv6_subnets_value2'], 'candidate_subnets': ['candidate_subnets_value1', 'candidate_subnets_value2'], 'cloud_router_ip_address': 'cloud_router_ip_address_value', 'cloud_router_ipv6_address': 'cloud_router_ipv6_address_value', 'cloud_router_ipv6_interface_id': 'cloud_router_ipv6_interface_id_value', 'configuration_constraints': {'bgp_md5': 'bgp_md5_value', 'bgp_peer_asn_ranges': [{'max_': 421, 'min_': 419}]}, 'creation_timestamp': 'creation_timestamp_value', 'customer_router_ip_address': 'customer_router_ip_address_value', 'customer_router_ipv6_address': 'customer_router_ipv6_address_value', 'customer_router_ipv6_interface_id': 'customer_router_ipv6_interface_id_value', 'dataplane_version': 1807, 'description': 'description_value', 'edge_availability_domain': 'edge_availability_domain_value', 'encryption': 'encryption_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect': 'interconnect_value', 'ipsec_internal_addresses': ['ipsec_internal_addresses_value1', 'ipsec_internal_addresses_value2'], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'mtu': 342, 'name': 'name_value', 'operational_status': 'operational_status_value', 'pairing_key': 'pairing_key_value', 'partner_asn': 1181, 'partner_metadata': {'interconnect_name': 'interconnect_name_value', 'partner_name': 'partner_name_value', 'portal_url': 'portal_url_value'}, 'private_interconnect_info': {'tag8021q': 632}, 'region': 'region_value', 'remote_service': 'remote_service_value', 'router': 'router_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value', 'subnet_length': 1394, 'type_': 'type__value', 'vlan_tag8021q': 1160}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert(request)


def test_insert_rest_flattened():
    """Flattened keyword arguments to insert() are expanded onto the REST URL path."""
    client = InterconnectAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + interconnect_attachment_resource=compute.InterconnectAttachment(admin_enabled=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertInterconnectAttachmentRequest(), + project='project_value', + region='region_value', + interconnect_attachment_resource=compute.InterconnectAttachment(admin_enabled=True), + ) + + +def test_insert_rest_error(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInterconnectAttachmentRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["interconnect_attachment_resource"] = {'admin_enabled': True, 'bandwidth': 'bandwidth_value', 'candidate_ipv6_subnets': ['candidate_ipv6_subnets_value1', 'candidate_ipv6_subnets_value2'], 'candidate_subnets': ['candidate_subnets_value1', 'candidate_subnets_value2'], 'cloud_router_ip_address': 'cloud_router_ip_address_value', 'cloud_router_ipv6_address': 'cloud_router_ipv6_address_value', 'cloud_router_ipv6_interface_id': 'cloud_router_ipv6_interface_id_value', 'configuration_constraints': {'bgp_md5': 'bgp_md5_value', 'bgp_peer_asn_ranges': [{'max_': 421, 'min_': 419}]}, 'creation_timestamp': 'creation_timestamp_value', 'customer_router_ip_address': 'customer_router_ip_address_value', 'customer_router_ipv6_address': 'customer_router_ipv6_address_value', 'customer_router_ipv6_interface_id': 'customer_router_ipv6_interface_id_value', 'dataplane_version': 1807, 'description': 'description_value', 'edge_availability_domain': 'edge_availability_domain_value', 'encryption': 'encryption_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect': 'interconnect_value', 'ipsec_internal_addresses': ['ipsec_internal_addresses_value1', 'ipsec_internal_addresses_value2'], 
'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'mtu': 342, 'name': 'name_value', 'operational_status': 'operational_status_value', 'pairing_key': 'pairing_key_value', 'partner_asn': 1181, 'partner_metadata': {'interconnect_name': 'interconnect_name_value', 'partner_name': 'partner_name_value', 'portal_url': 'portal_url_value'}, 'private_interconnect_info': {'tag8021q': 632}, 'region': 'region_value', 'remote_service': 'remote_service_value', 'router': 'router_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value', 'subnet_length': 1394, 'type_': 'type__value', 'vlan_tag8021q': 1160} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertInterconnectAttachmentRequest): + transport_class = transports.InterconnectAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("interconnectAttachmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(), + ) + client = InterconnectAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInterconnectAttachmentRequest.pb(compute.InsertInterconnectAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInterconnectAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInterconnectAttachmentRequest): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["interconnect_attachment_resource"] = {'admin_enabled': True, 'bandwidth': 'bandwidth_value', 'candidate_ipv6_subnets': ['candidate_ipv6_subnets_value1', 'candidate_ipv6_subnets_value2'], 'candidate_subnets': ['candidate_subnets_value1', 'candidate_subnets_value2'], 'cloud_router_ip_address': 'cloud_router_ip_address_value', 'cloud_router_ipv6_address': 'cloud_router_ipv6_address_value', 'cloud_router_ipv6_interface_id': 'cloud_router_ipv6_interface_id_value', 'configuration_constraints': {'bgp_md5': 'bgp_md5_value', 'bgp_peer_asn_ranges': [{'max_': 421, 'min_': 419}]}, 
'creation_timestamp': 'creation_timestamp_value', 'customer_router_ip_address': 'customer_router_ip_address_value', 'customer_router_ipv6_address': 'customer_router_ipv6_address_value', 'customer_router_ipv6_interface_id': 'customer_router_ipv6_interface_id_value', 'dataplane_version': 1807, 'description': 'description_value', 'edge_availability_domain': 'edge_availability_domain_value', 'encryption': 'encryption_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect': 'interconnect_value', 'ipsec_internal_addresses': ['ipsec_internal_addresses_value1', 'ipsec_internal_addresses_value2'], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'mtu': 342, 'name': 'name_value', 'operational_status': 'operational_status_value', 'pairing_key': 'pairing_key_value', 'partner_asn': 1181, 'partner_metadata': {'interconnect_name': 'interconnect_name_value', 'partner_name': 'partner_name_value', 'portal_url': 'portal_url_value'}, 'private_interconnect_info': {'tag8021q': 632}, 'region': 'region_value', 'remote_service': 'remote_service_value', 'router': 'router_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value', 'subnet_length': 1394, 'type_': 'type__value', 'vlan_tag8021q': 1160} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + interconnect_attachment_resource=compute.InterconnectAttachment(admin_enabled=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertInterconnectAttachmentRequest(), + project='project_value', + region='region_value', + interconnect_attachment_resource=compute.InterconnectAttachment(admin_enabled=True), + ) + + +def test_insert_unary_rest_error(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListInterconnectAttachmentsRequest, + dict, +]) +def test_list_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectAttachmentList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListInterconnectAttachmentsRequest): + transport_class = transports.InterconnectAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.InterconnectAttachmentList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InterconnectAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(), + ) + client = InterconnectAttachmentsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListInterconnectAttachmentsRequest.pb(compute.ListInterconnectAttachmentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InterconnectAttachmentList.to_json(compute.InterconnectAttachmentList()) + + request = compute.ListInterconnectAttachmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InterconnectAttachmentList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListInterconnectAttachmentsRequest): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectAttachmentList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListInterconnectAttachmentsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InterconnectAttachmentList( + items=[ + compute.InterconnectAttachment(), + compute.InterconnectAttachment(), + compute.InterconnectAttachment(), + ], + next_page_token='abc', + ), + compute.InterconnectAttachmentList( + items=[], + next_page_token='def', + ), + compute.InterconnectAttachmentList( + items=[ + compute.InterconnectAttachment(), + ], + next_page_token='ghi', + ), + compute.InterconnectAttachmentList( + items=[ + compute.InterconnectAttachment(), + compute.InterconnectAttachment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InterconnectAttachmentList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InterconnectAttachment) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert 
page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchInterconnectAttachmentRequest, + dict, +]) +def test_patch_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + request_init["interconnect_attachment_resource"] = {'admin_enabled': True, 'bandwidth': 'bandwidth_value', 'candidate_ipv6_subnets': ['candidate_ipv6_subnets_value1', 'candidate_ipv6_subnets_value2'], 'candidate_subnets': ['candidate_subnets_value1', 'candidate_subnets_value2'], 'cloud_router_ip_address': 'cloud_router_ip_address_value', 'cloud_router_ipv6_address': 'cloud_router_ipv6_address_value', 'cloud_router_ipv6_interface_id': 'cloud_router_ipv6_interface_id_value', 'configuration_constraints': {'bgp_md5': 'bgp_md5_value', 'bgp_peer_asn_ranges': [{'max_': 421, 'min_': 419}]}, 'creation_timestamp': 'creation_timestamp_value', 'customer_router_ip_address': 'customer_router_ip_address_value', 'customer_router_ipv6_address': 'customer_router_ipv6_address_value', 'customer_router_ipv6_interface_id': 'customer_router_ipv6_interface_id_value', 'dataplane_version': 1807, 'description': 'description_value', 'edge_availability_domain': 'edge_availability_domain_value', 'encryption': 'encryption_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect': 'interconnect_value', 'ipsec_internal_addresses': ['ipsec_internal_addresses_value1', 'ipsec_internal_addresses_value2'], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'mtu': 342, 'name': 'name_value', 'operational_status': 'operational_status_value', 'pairing_key': 'pairing_key_value', 'partner_asn': 1181, 'partner_metadata': {'interconnect_name': 'interconnect_name_value', 'partner_name': 
'partner_name_value', 'portal_url': 'portal_url_value'}, 'private_interconnect_info': {'tag8021q': 632}, 'region': 'region_value', 'remote_service': 'remote_service_value', 'router': 'router_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value', 'subnet_length': 1394, 'type_': 'type__value', 'vlan_tag8021q': 1160} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchInterconnectAttachmentRequest): + transport_class = transports.InterconnectAttachmentsRestTransport + + request_init = {} + request_init["interconnect_attachment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["interconnectAttachment"] = 'interconnect_attachment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnectAttachment" in jsonified_request + assert jsonified_request["interconnectAttachment"] == 'interconnect_attachment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnectAttachment", "interconnectAttachmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(), + ) + client = InterconnectAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchInterconnectAttachmentRequest.pb(compute.PatchInterconnectAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchInterconnectAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchInterconnectAttachmentRequest): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + request_init["interconnect_attachment_resource"] = {'admin_enabled': True, 'bandwidth': 'bandwidth_value', 'candidate_ipv6_subnets': ['candidate_ipv6_subnets_value1', 'candidate_ipv6_subnets_value2'], 'candidate_subnets': ['candidate_subnets_value1', 'candidate_subnets_value2'], 'cloud_router_ip_address': 'cloud_router_ip_address_value', 'cloud_router_ipv6_address': 'cloud_router_ipv6_address_value', 'cloud_router_ipv6_interface_id': 'cloud_router_ipv6_interface_id_value', 'configuration_constraints': {'bgp_md5': 'bgp_md5_value', 'bgp_peer_asn_ranges': [{'max_': 421, 'min_': 419}]}, 'creation_timestamp': 'creation_timestamp_value', 'customer_router_ip_address': 'customer_router_ip_address_value', 'customer_router_ipv6_address': 'customer_router_ipv6_address_value', 'customer_router_ipv6_interface_id': 'customer_router_ipv6_interface_id_value', 'dataplane_version': 1807, 'description': 'description_value', 'edge_availability_domain': 'edge_availability_domain_value', 'encryption': 
'encryption_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect': 'interconnect_value', 'ipsec_internal_addresses': ['ipsec_internal_addresses_value1', 'ipsec_internal_addresses_value2'], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'mtu': 342, 'name': 'name_value', 'operational_status': 'operational_status_value', 'pairing_key': 'pairing_key_value', 'partner_asn': 1181, 'partner_metadata': {'interconnect_name': 'interconnect_name_value', 'partner_name': 'partner_name_value', 'portal_url': 'portal_url_value'}, 'private_interconnect_info': {'tag8021q': 632}, 'region': 'region_value', 'remote_service': 'remote_service_value', 'router': 'router_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value', 'subnet_length': 1394, 'type_': 'type__value', 'vlan_tag8021q': 1160} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + interconnect_attachment='interconnect_attachment_value', + interconnect_attachment_resource=compute.InterconnectAttachment(admin_enabled=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchInterconnectAttachmentRequest(), + project='project_value', + region='region_value', + interconnect_attachment='interconnect_attachment_value', + interconnect_attachment_resource=compute.InterconnectAttachment(admin_enabled=True), + ) + + +def test_patch_rest_error(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchInterconnectAttachmentRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + request_init["interconnect_attachment_resource"] = {'admin_enabled': True, 'bandwidth': 'bandwidth_value', 'candidate_ipv6_subnets': ['candidate_ipv6_subnets_value1', 'candidate_ipv6_subnets_value2'], 'candidate_subnets': ['candidate_subnets_value1', 'candidate_subnets_value2'], 'cloud_router_ip_address': 'cloud_router_ip_address_value', 'cloud_router_ipv6_address': 'cloud_router_ipv6_address_value', 'cloud_router_ipv6_interface_id': 'cloud_router_ipv6_interface_id_value', 'configuration_constraints': {'bgp_md5': 'bgp_md5_value', 'bgp_peer_asn_ranges': [{'max_': 421, 'min_': 419}]}, 'creation_timestamp': 'creation_timestamp_value', 'customer_router_ip_address': 'customer_router_ip_address_value', 'customer_router_ipv6_address': 'customer_router_ipv6_address_value', 'customer_router_ipv6_interface_id': 'customer_router_ipv6_interface_id_value', 'dataplane_version': 1807, 'description': 'description_value', 'edge_availability_domain': 'edge_availability_domain_value', 'encryption': 'encryption_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect': 'interconnect_value', 
'ipsec_internal_addresses': ['ipsec_internal_addresses_value1', 'ipsec_internal_addresses_value2'], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'mtu': 342, 'name': 'name_value', 'operational_status': 'operational_status_value', 'pairing_key': 'pairing_key_value', 'partner_asn': 1181, 'partner_metadata': {'interconnect_name': 'interconnect_name_value', 'partner_name': 'partner_name_value', 'portal_url': 'portal_url_value'}, 'private_interconnect_info': {'tag8021q': 632}, 'region': 'region_value', 'remote_service': 'remote_service_value', 'router': 'router_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value', 'subnet_length': 1394, 'type_': 'type__value', 'vlan_tag8021q': 1160} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchInterconnectAttachmentRequest): + transport_class = transports.InterconnectAttachmentsRestTransport + + request_init = {} + request_init["interconnect_attachment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnectAttachment"] = 'interconnect_attachment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnectAttachment" in jsonified_request + assert jsonified_request["interconnectAttachment"] == 'interconnect_attachment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnectAttachment", "interconnectAttachmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(), + ) + client = InterconnectAttachmentsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchInterconnectAttachmentRequest.pb(compute.PatchInterconnectAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchInterconnectAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchInterconnectAttachmentRequest): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + request_init["interconnect_attachment_resource"] = {'admin_enabled': True, 'bandwidth': 'bandwidth_value', 'candidate_ipv6_subnets': ['candidate_ipv6_subnets_value1', 'candidate_ipv6_subnets_value2'], 'candidate_subnets': ['candidate_subnets_value1', 'candidate_subnets_value2'], 'cloud_router_ip_address': 'cloud_router_ip_address_value', 'cloud_router_ipv6_address': 'cloud_router_ipv6_address_value', 'cloud_router_ipv6_interface_id': 
'cloud_router_ipv6_interface_id_value', 'configuration_constraints': {'bgp_md5': 'bgp_md5_value', 'bgp_peer_asn_ranges': [{'max_': 421, 'min_': 419}]}, 'creation_timestamp': 'creation_timestamp_value', 'customer_router_ip_address': 'customer_router_ip_address_value', 'customer_router_ipv6_address': 'customer_router_ipv6_address_value', 'customer_router_ipv6_interface_id': 'customer_router_ipv6_interface_id_value', 'dataplane_version': 1807, 'description': 'description_value', 'edge_availability_domain': 'edge_availability_domain_value', 'encryption': 'encryption_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect': 'interconnect_value', 'ipsec_internal_addresses': ['ipsec_internal_addresses_value1', 'ipsec_internal_addresses_value2'], 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'mtu': 342, 'name': 'name_value', 'operational_status': 'operational_status_value', 'pairing_key': 'pairing_key_value', 'partner_asn': 1181, 'partner_metadata': {'interconnect_name': 'interconnect_name_value', 'partner_name': 'partner_name_value', 'portal_url': 'portal_url_value'}, 'private_interconnect_info': {'tag8021q': 632}, 'region': 'region_value', 'remote_service': 'remote_service_value', 'router': 'router_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value', 'subnet_length': 1394, 'type_': 'type__value', 'vlan_tag8021q': 1160} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'interconnect_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + interconnect_attachment='interconnect_attachment_value', + interconnect_attachment_resource=compute.InterconnectAttachment(admin_enabled=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchInterconnectAttachmentRequest(), + project='project_value', + region='region_value', + interconnect_attachment='interconnect_attachment_value', + interconnect_attachment_resource=compute.InterconnectAttachment(admin_enabled=True), + ) + + +def test_patch_unary_rest_error(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsInterconnectAttachmentRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsInterconnectAttachmentRequest): + transport_class = transports.InterconnectAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(), + ) + client = InterconnectAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsInterconnectAttachmentRequest.pb(compute.SetLabelsInterconnectAttachmentRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsInterconnectAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsInterconnectAttachmentRequest): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsInterconnectAttachmentRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsInterconnectAttachmentRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsInterconnectAttachmentRequest): + transport_class = transports.InterconnectAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.InterconnectAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectAttachmentsRestInterceptor(), + ) + client = InterconnectAttachmentsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.InterconnectAttachmentsRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsInterconnectAttachmentRequest.pb(compute.SetLabelsInterconnectAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsInterconnectAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsInterconnectAttachmentRequest): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsInterconnectAttachmentRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectAttachmentsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InterconnectAttachmentsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InterconnectAttachmentsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectAttachmentsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InterconnectAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InterconnectAttachmentsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.InterconnectAttachmentsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = InterconnectAttachmentsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_interconnect_attachments_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InterconnectAttachmentsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_interconnect_attachments_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.interconnect_attachments.transports.InterconnectAttachmentsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InterconnectAttachmentsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'set_labels', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_interconnect_attachments_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.interconnect_attachments.transports.InterconnectAttachmentsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InterconnectAttachmentsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_interconnect_attachments_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.interconnect_attachments.transports.InterconnectAttachmentsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InterconnectAttachmentsTransport() + adc.assert_called_once() + + +def test_interconnect_attachments_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InterconnectAttachmentsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_interconnect_attachments_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.InterconnectAttachmentsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_interconnect_attachments_host_no_port(transport_name): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_interconnect_attachments_host_with_port(transport_name): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_interconnect_attachments_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InterconnectAttachmentsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InterconnectAttachmentsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = 
InterconnectAttachmentsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = InterconnectAttachmentsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectAttachmentsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = InterconnectAttachmentsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InterconnectAttachmentsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectAttachmentsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InterconnectAttachmentsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InterconnectAttachmentsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectAttachmentsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = InterconnectAttachmentsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InterconnectAttachmentsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InterconnectAttachmentsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InterconnectAttachmentsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InterconnectAttachmentsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectAttachmentsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InterconnectAttachmentsTransport, '_prep_wrapped_messages') as prep: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InterconnectAttachmentsTransport, '_prep_wrapped_messages') as prep: + transport_class = InterconnectAttachmentsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = InterconnectAttachmentsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InterconnectAttachmentsClient, transports.InterconnectAttachmentsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_locations.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_locations.py new file mode 100644 index 000000000..364cca927 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_locations.py @@ -0,0 +1,1397 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.interconnect_locations import InterconnectLocationsClient +from google.cloud.compute_v1.services.interconnect_locations import pagers +from google.cloud.compute_v1.services.interconnect_locations import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InterconnectLocationsClient._get_default_mtls_endpoint(None) is None + assert InterconnectLocationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InterconnectLocationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InterconnectLocationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InterconnectLocationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InterconnectLocationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InterconnectLocationsClient, "rest"), +]) +def test_interconnect_locations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + 
(transports.InterconnectLocationsRestTransport, "rest"), +]) +def test_interconnect_locations_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InterconnectLocationsClient, "rest"), +]) +def test_interconnect_locations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_interconnect_locations_client_get_transport_class(): + transport = InterconnectLocationsClient.get_transport_class() + available_transports = [ + transports.InterconnectLocationsRestTransport, + ] + assert transport in available_transports + + transport = InterconnectLocationsClient.get_transport_class("rest") + assert transport == 
transports.InterconnectLocationsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InterconnectLocationsClient, transports.InterconnectLocationsRestTransport, "rest"), +]) +@mock.patch.object(InterconnectLocationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectLocationsClient)) +def test_interconnect_locations_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InterconnectLocationsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InterconnectLocationsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (InterconnectLocationsClient, transports.InterconnectLocationsRestTransport, "rest", "true"), + (InterconnectLocationsClient, transports.InterconnectLocationsRestTransport, "rest", "false"), +]) +@mock.patch.object(InterconnectLocationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectLocationsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_interconnect_locations_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): 
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InterconnectLocationsClient +]) +@mock.patch.object(InterconnectLocationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectLocationsClient)) +def test_interconnect_locations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InterconnectLocationsClient, transports.InterconnectLocationsRestTransport, "rest"), +]) +def test_interconnect_locations_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InterconnectLocationsClient, transports.InterconnectLocationsRestTransport, "rest", None), +]) +def test_interconnect_locations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetInterconnectLocationRequest, + dict, +]) +def test_get_rest(request_type): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect_location': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectLocation( + address='address_value', + availability_zone='availability_zone_value', + city='city_value', + continent='continent_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + facility_provider='facility_provider_value', + facility_provider_facility_id='facility_provider_facility_id_value', + id=205, + kind='kind_value', + name='name_value', + peeringdb_facility_id='peeringdb_facility_id_value', + self_link='self_link_value', + status='status_value', + supports_pzs=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectLocation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InterconnectLocation) + assert response.address == 'address_value' + assert response.availability_zone == 'availability_zone_value' + assert response.city == 'city_value' + assert response.continent == 'continent_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.facility_provider == 'facility_provider_value' + assert response.facility_provider_facility_id == 'facility_provider_facility_id_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.peeringdb_facility_id == 'peeringdb_facility_id_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + assert response.supports_pzs is True + + +def test_get_rest_required_fields(request_type=compute.GetInterconnectLocationRequest): + transport_class = transports.InterconnectLocationsRestTransport + + request_init = {} + request_init["interconnect_location"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnectLocation"] = 'interconnect_location_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + 
assert "interconnectLocation" in jsonified_request + assert jsonified_request["interconnectLocation"] == 'interconnect_location_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectLocation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InterconnectLocation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InterconnectLocationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("interconnectLocation", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) 
+def test_get_rest_interceptors(null_interceptor): + transport = transports.InterconnectLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectLocationsRestInterceptor(), + ) + client = InterconnectLocationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectLocationsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.InterconnectLocationsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInterconnectLocationRequest.pb(compute.GetInterconnectLocationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InterconnectLocation.to_json(compute.InterconnectLocation()) + + request = compute.GetInterconnectLocationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InterconnectLocation() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetInterconnectLocationRequest): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect_location': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectLocation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'interconnect_location': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + interconnect_location='interconnect_location_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectLocation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnectLocations/{interconnect_location}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetInterconnectLocationRequest(), + project='project_value', + interconnect_location='interconnect_location_value', + ) + + +def test_get_rest_error(): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListInterconnectLocationsRequest, + dict, +]) +def test_list_rest(request_type): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InterconnectLocationList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListInterconnectLocationsRequest): + transport_class = transports.InterconnectLocationsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectLocationList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InterconnectLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InterconnectLocationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InterconnectLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectLocationsRestInterceptor(), + ) + client = InterconnectLocationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectLocationsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.InterconnectLocationsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListInterconnectLocationsRequest.pb(compute.ListInterconnectLocationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } 
+ + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InterconnectLocationList.to_json(compute.InterconnectLocationList()) + + request = compute.ListInterconnectLocationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InterconnectLocationList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListInterconnectLocationsRequest): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InterconnectLocationList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnectLocations" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListInterconnectLocationsRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InterconnectLocationList( + items=[ + compute.InterconnectLocation(), + compute.InterconnectLocation(), + compute.InterconnectLocation(), + ], + next_page_token='abc', + ), + compute.InterconnectLocationList( + items=[], + next_page_token='def', + ), + compute.InterconnectLocationList( + items=[ + compute.InterconnectLocation(), + ], + next_page_token='ghi', + ), + compute.InterconnectLocationList( + items=[ + compute.InterconnectLocation(), + compute.InterconnectLocation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InterconnectLocationList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InterconnectLocation) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InterconnectLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.InterconnectLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectLocationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.InterconnectLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InterconnectLocationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InterconnectLocationsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InterconnectLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectLocationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InterconnectLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InterconnectLocationsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.InterconnectLocationsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = InterconnectLocationsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_interconnect_locations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InterconnectLocationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_interconnect_locations_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.interconnect_locations.transports.InterconnectLocationsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InterconnectLocationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'get', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_interconnect_locations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.interconnect_locations.transports.InterconnectLocationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InterconnectLocationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_interconnect_locations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.interconnect_locations.transports.InterconnectLocationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InterconnectLocationsTransport() + adc.assert_called_once() + + +def test_interconnect_locations_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InterconnectLocationsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_interconnect_locations_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.InterconnectLocationsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_interconnect_locations_host_no_port(transport_name): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_interconnect_locations_host_with_port(transport_name): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_interconnect_locations_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InterconnectLocationsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InterconnectLocationsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = InterconnectLocationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = InterconnectLocationsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectLocationsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = InterconnectLocationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InterconnectLocationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InterconnectLocationsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InterconnectLocationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InterconnectLocationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectLocationsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = InterconnectLocationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InterconnectLocationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectLocationsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InterconnectLocationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InterconnectLocationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InterconnectLocationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InterconnectLocationsTransport, '_prep_wrapped_messages') as prep: + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InterconnectLocationsTransport, '_prep_wrapped_messages') as prep: + transport_class = InterconnectLocationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InterconnectLocationsClient, transports.InterconnectLocationsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py new file mode 100644 index 000000000..49ed538ac --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py @@ -0,0 +1,1401 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.interconnect_remote_locations import InterconnectRemoteLocationsClient +from google.cloud.compute_v1.services.interconnect_remote_locations import pagers +from google.cloud.compute_v1.services.interconnect_remote_locations import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InterconnectRemoteLocationsClient._get_default_mtls_endpoint(None) is None + assert InterconnectRemoteLocationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InterconnectRemoteLocationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InterconnectRemoteLocationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InterconnectRemoteLocationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InterconnectRemoteLocationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InterconnectRemoteLocationsClient, "rest"), +]) +def test_interconnect_remote_locations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.InterconnectRemoteLocationsRestTransport, "rest"), +]) +def 
test_interconnect_remote_locations_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InterconnectRemoteLocationsClient, "rest"), +]) +def test_interconnect_remote_locations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_interconnect_remote_locations_client_get_transport_class(): + transport = InterconnectRemoteLocationsClient.get_transport_class() + available_transports = [ + transports.InterconnectRemoteLocationsRestTransport, + ] + assert transport in available_transports + + transport = InterconnectRemoteLocationsClient.get_transport_class("rest") + assert transport == 
transports.InterconnectRemoteLocationsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InterconnectRemoteLocationsClient, transports.InterconnectRemoteLocationsRestTransport, "rest"), +]) +@mock.patch.object(InterconnectRemoteLocationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectRemoteLocationsClient)) +def test_interconnect_remote_locations_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InterconnectRemoteLocationsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InterconnectRemoteLocationsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (InterconnectRemoteLocationsClient, transports.InterconnectRemoteLocationsRestTransport, "rest", "true"), + (InterconnectRemoteLocationsClient, transports.InterconnectRemoteLocationsRestTransport, "rest", "false"), +]) +@mock.patch.object(InterconnectRemoteLocationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectRemoteLocationsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_interconnect_remote_locations_client_mtls_env_auto(client_class, 
transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InterconnectRemoteLocationsClient +]) +@mock.patch.object(InterconnectRemoteLocationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectRemoteLocationsClient)) +def test_interconnect_remote_locations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InterconnectRemoteLocationsClient, transports.InterconnectRemoteLocationsRestTransport, "rest"), +]) +def test_interconnect_remote_locations_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InterconnectRemoteLocationsClient, transports.InterconnectRemoteLocationsRestTransport, "rest", None), +]) +def test_interconnect_remote_locations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetInterconnectRemoteLocationRequest, + dict, +]) +def test_get_rest(request_type): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect_remote_location': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InterconnectRemoteLocation( + address='address_value', + city='city_value', + continent='continent_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + facility_provider='facility_provider_value', + facility_provider_facility_id='facility_provider_facility_id_value', + id=205, + kind='kind_value', + lacp='lacp_value', + max_lag_size100_gbps=1935, + max_lag_size10_gbps=1887, + name='name_value', + peeringdb_facility_id='peeringdb_facility_id_value', + remote_service='remote_service_value', + self_link='self_link_value', + status='status_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectRemoteLocation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InterconnectRemoteLocation) + assert response.address == 'address_value' + assert response.city == 'city_value' + assert response.continent == 'continent_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.facility_provider == 'facility_provider_value' + assert response.facility_provider_facility_id == 'facility_provider_facility_id_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.lacp == 'lacp_value' + assert response.max_lag_size100_gbps == 1935 + assert response.max_lag_size10_gbps == 1887 + assert response.name == 'name_value' + assert response.peeringdb_facility_id == 'peeringdb_facility_id_value' + assert response.remote_service == 'remote_service_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + + +def test_get_rest_required_fields(request_type=compute.GetInterconnectRemoteLocationRequest): + transport_class = transports.InterconnectRemoteLocationsRestTransport + + request_init = {} + request_init["interconnect_remote_location"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnectRemoteLocation"] = 'interconnect_remote_location_value' + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnectRemoteLocation" in jsonified_request + assert jsonified_request["interconnectRemoteLocation"] == 'interconnect_remote_location_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectRemoteLocation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InterconnectRemoteLocation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InterconnectRemoteLocationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("interconnectRemoteLocation", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.InterconnectRemoteLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectRemoteLocationsRestInterceptor(), + ) + client = InterconnectRemoteLocationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectRemoteLocationsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.InterconnectRemoteLocationsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInterconnectRemoteLocationRequest.pb(compute.GetInterconnectRemoteLocationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + 
+ req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InterconnectRemoteLocation.to_json(compute.InterconnectRemoteLocation()) + + request = compute.GetInterconnectRemoteLocationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InterconnectRemoteLocation() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetInterconnectRemoteLocationRequest): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect_remote_location': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InterconnectRemoteLocation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'interconnect_remote_location': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + interconnect_remote_location='interconnect_remote_location_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectRemoteLocation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnectRemoteLocations/{interconnect_remote_location}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetInterconnectRemoteLocationRequest(), + project='project_value', + interconnect_remote_location='interconnect_remote_location_value', + ) + + +def test_get_rest_error(): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListInterconnectRemoteLocationsRequest, + dict, +]) +def test_list_rest(request_type): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectRemoteLocationList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectRemoteLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListInterconnectRemoteLocationsRequest): + transport_class = transports.InterconnectRemoteLocationsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectRemoteLocationList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InterconnectRemoteLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InterconnectRemoteLocationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InterconnectRemoteLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectRemoteLocationsRestInterceptor(), + ) + client = InterconnectRemoteLocationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InterconnectRemoteLocationsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.InterconnectRemoteLocationsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListInterconnectRemoteLocationsRequest.pb(compute.ListInterconnectRemoteLocationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InterconnectRemoteLocationList.to_json(compute.InterconnectRemoteLocationList()) + + request = compute.ListInterconnectRemoteLocationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InterconnectRemoteLocationList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListInterconnectRemoteLocationsRequest): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectRemoteLocationList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectRemoteLocationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnectRemoteLocations" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListInterconnectRemoteLocationsRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InterconnectRemoteLocationList( + items=[ + compute.InterconnectRemoteLocation(), + compute.InterconnectRemoteLocation(), + compute.InterconnectRemoteLocation(), + ], + next_page_token='abc', + ), + compute.InterconnectRemoteLocationList( + items=[], + next_page_token='def', + ), + compute.InterconnectRemoteLocationList( + items=[ + compute.InterconnectRemoteLocation(), + ], + next_page_token='ghi', + ), + compute.InterconnectRemoteLocationList( + items=[ + compute.InterconnectRemoteLocation(), + compute.InterconnectRemoteLocation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InterconnectRemoteLocationList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InterconnectRemoteLocation) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert 
page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InterconnectRemoteLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.InterconnectRemoteLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectRemoteLocationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.InterconnectRemoteLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InterconnectRemoteLocationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InterconnectRemoteLocationsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InterconnectRemoteLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InterconnectRemoteLocationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.InterconnectRemoteLocationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InterconnectRemoteLocationsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.InterconnectRemoteLocationsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = InterconnectRemoteLocationsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_interconnect_remote_locations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InterconnectRemoteLocationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_interconnect_remote_locations_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.interconnect_remote_locations.transports.InterconnectRemoteLocationsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InterconnectRemoteLocationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'get', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_interconnect_remote_locations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.interconnect_remote_locations.transports.InterconnectRemoteLocationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InterconnectRemoteLocationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_interconnect_remote_locations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.interconnect_remote_locations.transports.InterconnectRemoteLocationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InterconnectRemoteLocationsTransport() + adc.assert_called_once() + + +def test_interconnect_remote_locations_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InterconnectRemoteLocationsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_interconnect_remote_locations_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.InterconnectRemoteLocationsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_interconnect_remote_locations_host_no_port(transport_name): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + 
+@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_interconnect_remote_locations_host_with_port(transport_name): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_interconnect_remote_locations_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InterconnectRemoteLocationsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InterconnectRemoteLocationsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = InterconnectRemoteLocationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = InterconnectRemoteLocationsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InterconnectRemoteLocationsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = InterconnectRemoteLocationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InterconnectRemoteLocationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectRemoteLocationsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InterconnectRemoteLocationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InterconnectRemoteLocationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectRemoteLocationsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = InterconnectRemoteLocationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InterconnectRemoteLocationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InterconnectRemoteLocationsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InterconnectRemoteLocationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InterconnectRemoteLocationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = InterconnectRemoteLocationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InterconnectRemoteLocationsTransport, '_prep_wrapped_messages') as prep: + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InterconnectRemoteLocationsTransport, '_prep_wrapped_messages') as prep: + transport_class = InterconnectRemoteLocationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = 
InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InterconnectRemoteLocationsClient, transports.InterconnectRemoteLocationsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnects.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnects.py new file mode 100644 index 000000000..ab7c84311 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_interconnects.py @@ -0,0 +1,3851 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.interconnects import InterconnectsClient +from google.cloud.compute_v1.services.interconnects import pagers +from google.cloud.compute_v1.services.interconnects import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is 
localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InterconnectsClient._get_default_mtls_endpoint(None) is None + assert InterconnectsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InterconnectsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InterconnectsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InterconnectsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InterconnectsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InterconnectsClient, "rest"), +]) +def test_interconnects_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + 
(transports.InterconnectsRestTransport, "rest"), +]) +def test_interconnects_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InterconnectsClient, "rest"), +]) +def test_interconnects_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_interconnects_client_get_transport_class(): + transport = InterconnectsClient.get_transport_class() + available_transports = [ + transports.InterconnectsRestTransport, + ] + assert transport in available_transports + + transport = InterconnectsClient.get_transport_class("rest") + assert transport == transports.InterconnectsRestTransport + + 
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InterconnectsClient, transports.InterconnectsRestTransport, "rest"), +]) +@mock.patch.object(InterconnectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectsClient)) +def test_interconnects_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InterconnectsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InterconnectsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (InterconnectsClient, transports.InterconnectsRestTransport, "rest", "true"), + (InterconnectsClient, transports.InterconnectsRestTransport, "rest", "false"), +]) +@mock.patch.object(InterconnectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_interconnects_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InterconnectsClient +]) +@mock.patch.object(InterconnectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InterconnectsClient)) +def test_interconnects_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InterconnectsClient, transports.InterconnectsRestTransport, "rest"), +]) +def test_interconnects_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InterconnectsClient, transports.InterconnectsRestTransport, "rest", None), +]) +def test_interconnects_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInterconnectRequest, + dict, +]) +def test_delete_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["interconnect"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnect"] = 'interconnect_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnect" in jsonified_request + assert jsonified_request["interconnect"] == 'interconnect_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnect", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInterconnectRequest.pb(compute.DeleteInterconnectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInterconnectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInterconnectRequest): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'interconnect': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + interconnect='interconnect_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects/{interconnect}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteInterconnectRequest(), + project='project_value', + interconnect='interconnect_value', + ) + + +def test_delete_rest_error(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInterconnectRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + 
response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["interconnect"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnect"] = 'interconnect_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnect" in jsonified_request + assert jsonified_request["interconnect"] == 'interconnect_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnect", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.InterconnectsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInterconnectRequest.pb(compute.DeleteInterconnectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInterconnectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInterconnectRequest): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'interconnect': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + interconnect='interconnect_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects/{interconnect}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteInterconnectRequest(), + project='project_value', + interconnect='interconnect_value', + ) + + +def test_delete_unary_rest_error(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetInterconnectRequest, + dict, +]) +def test_get_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Interconnect( + admin_enabled=True, + creation_timestamp='creation_timestamp_value', + customer_name='customer_name_value', + description='description_value', + google_ip_address='google_ip_address_value', + google_reference_id='google_reference_id_value', + id=205, + interconnect_attachments=['interconnect_attachments_value'], + interconnect_type='interconnect_type_value', + kind='kind_value', + label_fingerprint='label_fingerprint_value', + link_type='link_type_value', + location='location_value', + name='name_value', + noc_contact_email='noc_contact_email_value', + operational_status='operational_status_value', + peer_ip_address='peer_ip_address_value', + provisioned_link_count=2375, + remote_location='remote_location_value', + requested_link_count=2151, + satisfies_pzs=True, + self_link='self_link_value', + state='state_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Interconnect.pb(return_value) + json_return_value = 
json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Interconnect) + assert response.admin_enabled is True + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.customer_name == 'customer_name_value' + assert response.description == 'description_value' + assert response.google_ip_address == 'google_ip_address_value' + assert response.google_reference_id == 'google_reference_id_value' + assert response.id == 205 + assert response.interconnect_attachments == ['interconnect_attachments_value'] + assert response.interconnect_type == 'interconnect_type_value' + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.link_type == 'link_type_value' + assert response.location == 'location_value' + assert response.name == 'name_value' + assert response.noc_contact_email == 'noc_contact_email_value' + assert response.operational_status == 'operational_status_value' + assert response.peer_ip_address == 'peer_ip_address_value' + assert response.provisioned_link_count == 2375 + assert response.remote_location == 'remote_location_value' + assert response.requested_link_count == 2151 + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.state == 'state_value' + + +def test_get_rest_required_fields(request_type=compute.GetInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["interconnect"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + 
+ # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnect"] = 'interconnect_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnect" in jsonified_request + assert jsonified_request["interconnect"] == 'interconnect_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Interconnect() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Interconnect.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("interconnect", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInterconnectRequest.pb(compute.GetInterconnectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Interconnect.to_json(compute.Interconnect()) + + request = compute.GetInterconnectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Interconnect() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetInterconnectRequest): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Interconnect() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'interconnect': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + interconnect='interconnect_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Interconnect.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects/{interconnect}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetInterconnectRequest(), + project='project_value', + interconnect='interconnect_value', + ) + + +def test_get_rest_error(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetDiagnosticsInterconnectRequest, + dict, +]) +def test_get_diagnostics_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectsGetDiagnosticsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectsGetDiagnosticsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_diagnostics(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InterconnectsGetDiagnosticsResponse) + + +def test_get_diagnostics_rest_required_fields(request_type=compute.GetDiagnosticsInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["interconnect"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_diagnostics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnect"] = 'interconnect_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_diagnostics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnect" in jsonified_request + assert jsonified_request["interconnect"] == 'interconnect_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectsGetDiagnosticsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InterconnectsGetDiagnosticsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_diagnostics(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_diagnostics_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_diagnostics._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("interconnect", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_diagnostics_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, 
"post_get_diagnostics") as post, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "pre_get_diagnostics") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetDiagnosticsInterconnectRequest.pb(compute.GetDiagnosticsInterconnectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InterconnectsGetDiagnosticsResponse.to_json(compute.InterconnectsGetDiagnosticsResponse()) + + request = compute.GetDiagnosticsInterconnectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InterconnectsGetDiagnosticsResponse() + + client.get_diagnostics(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_diagnostics_rest_bad_request(transport: str = 'rest', request_type=compute.GetDiagnosticsInterconnectRequest): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_diagnostics(request) + + +def test_get_diagnostics_rest_flattened(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectsGetDiagnosticsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'interconnect': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + interconnect='interconnect_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectsGetDiagnosticsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_diagnostics(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects/{interconnect}/getDiagnostics" % client.transport._host, args[1]) + + +def test_get_diagnostics_rest_flattened_error(transport: str = 'rest'): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_diagnostics( + compute.GetDiagnosticsInterconnectRequest(), + project='project_value', + interconnect='interconnect_value', + ) + + +def test_get_diagnostics_rest_error(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertInterconnectRequest, + dict, +]) +def test_insert_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["interconnect_resource"] = {'admin_enabled': True, 'circuit_infos': [{'customer_demarc_id': 'customer_demarc_id_value', 'google_circuit_id': 'google_circuit_id_value', 'google_demarc_id': 'google_demarc_id_value'}], 'creation_timestamp': 'creation_timestamp_value', 'customer_name': 'customer_name_value', 'description': 'description_value', 'expected_outages': [{'affected_circuits': ['affected_circuits_value1', 'affected_circuits_value2'], 'description': 'description_value', 'end_time': 837, 'issue_type': 'issue_type_value', 'name': 'name_value', 'source': 'source_value', 'start_time': 1084, 'state': 'state_value'}], 'google_ip_address': 'google_ip_address_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect_attachments': ['interconnect_attachments_value1', 
'interconnect_attachments_value2'], 'interconnect_type': 'interconnect_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'link_type': 'link_type_value', 'location': 'location_value', 'name': 'name_value', 'noc_contact_email': 'noc_contact_email_value', 'operational_status': 'operational_status_value', 'peer_ip_address': 'peer_ip_address_value', 'provisioned_link_count': 2375, 'remote_location': 'remote_location_value', 'requested_link_count': 2151, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we 
expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnectResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertInterconnectRequest.pb(compute.InsertInterconnectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertInterconnectRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        # Each interceptor hook must have fired exactly once for the call.
        pre.assert_called_once()
        post.assert_called_once()


def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInterconnectRequest):
    """Verify that an HTTP 400 from the REST layer surfaces as core_exceptions.BadRequest."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["interconnect_resource"] = {'admin_enabled': True, 'circuit_infos': [{'customer_demarc_id': 'customer_demarc_id_value', 'google_circuit_id': 'google_circuit_id_value', 'google_demarc_id': 'google_demarc_id_value'}], 'creation_timestamp': 'creation_timestamp_value', 'customer_name': 'customer_name_value', 'description': 'description_value', 'expected_outages': [{'affected_circuits': ['affected_circuits_value1', 'affected_circuits_value2'], 'description': 'description_value', 'end_time': 837, 'issue_type': 'issue_type_value', 'name': 'name_value', 'source': 'source_value', 'start_time': 1084, 'state': 'state_value'}], 'google_ip_address': 'google_ip_address_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect_attachments': ['interconnect_attachments_value1', 'interconnect_attachments_value2'], 'interconnect_type': 'interconnect_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'link_type': 'link_type_value', 'location': 'location_value', 'name': 'name_value', 'noc_contact_email': 'noc_contact_email_value', 'operational_status': 'operational_status_value', 'peer_ip_address': 'peer_ip_address_value', 'provisioned_link_count': 2375, 'remote_location': 'remote_location_value', 'requested_link_count': 2151, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'state': 'state_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert(request)


def test_insert_rest_flattened():
    """Verify that flattened keyword args to `insert` produce the expected REST URI."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            interconnect_resource=compute.Interconnect(admin_enabled=True),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects" % client.transport._host, args[1])


def test_insert_rest_flattened_error(transport: str = 'rest'):
    """Verify that mixing a request object with flattened fields raises ValueError."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert(
            compute.InsertInterconnectRequest(),
            project='project_value',
            interconnect_resource=compute.Interconnect(admin_enabled=True),
        )


def test_insert_rest_error():
    # Generated placeholder: only verifies that the REST client constructs.
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertInterconnectRequest,
    dict,
])
def test_insert_unary_rest(request_type):
    """Verify the unary `insert_unary` call over REST returns a compute.Operation."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    # NOTE: dict literal continues on the next source line.
    request_init["interconnect_resource"] = {'admin_enabled': True, 'circuit_infos': [{'customer_demarc_id': 'customer_demarc_id_value', 'google_circuit_id': 'google_circuit_id_value', 'google_demarc_id': 'google_demarc_id_value'}], 'creation_timestamp': 'creation_timestamp_value', 'customer_name': 'customer_name_value', 'description': 'description_value', 'expected_outages': [{'affected_circuits': ['affected_circuits_value1', 'affected_circuits_value2'], 'description': 'description_value', 'end_time': 837, 'issue_type': 'issue_type_value', 'name': 'name_value', 'source': 'source_value', 'start_time': 1084, 'state': 'state_value'}], 'google_ip_address': 'google_ip_address_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect_attachments': ['interconnect_attachments_value1', 'interconnect_attachments_value2'],
'interconnect_type': 'interconnect_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'link_type': 'link_type_value', 'location': 'location_value', 'name': 'name_value', 'noc_contact_email': 'noc_contact_email_value', 'operational_status': 'operational_status_value', 'peer_ip_address': 'peer_ip_address_value', 'provisioned_link_count': 2375, 'remote_location': 'remote_location_value', 'requested_link_count': 2151, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnectResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertInterconnectRequest.pb(compute.InsertInterconnectRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertInterconnectRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        # Each interceptor hook must have fired exactly once for the call.
        pre.assert_called_once()
        post.assert_called_once()


def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertInterconnectRequest):
    """Verify that an HTTP 400 from the REST layer surfaces as core_exceptions.BadRequest (unary variant)."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["interconnect_resource"] = {'admin_enabled': True, 'circuit_infos': [{'customer_demarc_id': 'customer_demarc_id_value', 'google_circuit_id': 'google_circuit_id_value', 'google_demarc_id': 'google_demarc_id_value'}], 'creation_timestamp': 'creation_timestamp_value', 'customer_name': 'customer_name_value', 'description': 'description_value', 'expected_outages': [{'affected_circuits': ['affected_circuits_value1', 'affected_circuits_value2'], 'description': 'description_value', 'end_time': 837, 'issue_type': 'issue_type_value', 'name': 'name_value', 'source': 'source_value', 'start_time': 1084, 'state': 'state_value'}], 'google_ip_address': 'google_ip_address_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect_attachments': ['interconnect_attachments_value1', 'interconnect_attachments_value2'], 'interconnect_type': 'interconnect_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'link_type': 'link_type_value', 'location': 'location_value', 'name': 'name_value', 'noc_contact_email': 'noc_contact_email_value', 'operational_status': 'operational_status_value', 'peer_ip_address': 'peer_ip_address_value', 'provisioned_link_count': 2375, 'remote_location': 'remote_location_value', 'requested_link_count': 2151, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'state': 'state_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request)


def test_insert_unary_rest_flattened():
    """Verify that flattened keyword args to `insert_unary` produce the expected REST URI."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            interconnect_resource=compute.Interconnect(admin_enabled=True),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects" % client.transport._host, args[1])


def test_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Verify that mixing a request object with flattened fields raises ValueError (unary variant)."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertInterconnectRequest(), + project='project_value', + interconnect_resource=compute.Interconnect(admin_enabled=True), + ) + + +def test_insert_unary_rest_error(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListInterconnectsRequest, + dict, +]) +def test_list_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InterconnectList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListInterconnectsRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InterconnectList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InterconnectList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "post_list") as post, \ + 
         mock.patch.object(transports.InterconnectsRestInterceptor, "pre_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListInterconnectsRequest.pb(compute.ListInterconnectsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.InterconnectList.to_json(compute.InterconnectList())

        request = compute.ListInterconnectsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.InterconnectList()

        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        # Each interceptor hook must have fired exactly once for the call.
        pre.assert_called_once()
        post.assert_called_once()


def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListInterconnectsRequest):
    """Verify that an HTTP 400 from the REST layer surfaces as core_exceptions.BadRequest."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list(request)


def test_list_rest_flattened():
    """Verify that the flattened `project` arg to `list` produces the expected REST URI."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InterconnectList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.InterconnectList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    """Verify that mixing a request object with flattened fields raises ValueError."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListInterconnectsRequest(),
            project='project_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    """Verify that the ListPager walks multiple pages and exposes page tokens."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.InterconnectList(
                items=[
                    compute.Interconnect(),
                    compute.Interconnect(),
                    compute.Interconnect(),
                ],
                next_page_token='abc',
            ),
            compute.InterconnectList(
                items=[],
                next_page_token='def',
            ),
            compute.InterconnectList(
                items=[
                    compute.Interconnect(),
                ],
                next_page_token='ghi',
            ),
            compute.InterconnectList(
                items=[
                    compute.Interconnect(),
                    compute.Interconnect(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.InterconnectList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1'}

        pager = client.list(request=sample_request)

        # 3 + 0 + 1 + 2 items across the four fabricated pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.Interconnect)
                   for i in results)

        pages = list(client.list(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [
    compute.PatchInterconnectRequest,
    dict,
])
def test_patch_rest(request_type):
    """Verify that `patch` over REST surfaces the extended Operation fields."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'interconnect': 'sample2'}
    # NOTE: dict literal continues on the next source line.
    request_init["interconnect_resource"] = {'admin_enabled': True, 'circuit_infos': [{'customer_demarc_id': 'customer_demarc_id_value', 'google_circuit_id': 'google_circuit_id_value', 'google_demarc_id': 'google_demarc_id_value'}], 'creation_timestamp': 'creation_timestamp_value',
'customer_name': 'customer_name_value', 'description': 'description_value', 'expected_outages': [{'affected_circuits': ['affected_circuits_value1', 'affected_circuits_value2'], 'description': 'description_value', 'end_time': 837, 'issue_type': 'issue_type_value', 'name': 'name_value', 'source': 'source_value', 'start_time': 1084, 'state': 'state_value'}], 'google_ip_address': 'google_ip_address_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect_attachments': ['interconnect_attachments_value1', 'interconnect_attachments_value2'], 'interconnect_type': 'interconnect_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'link_type': 'link_type_value', 'location': 'location_value', 'name': 'name_value', 'noc_contact_email': 'noc_contact_email_value', 'operational_status': 'operational_status_value', 'peer_ip_address': 'peer_ip_address_value', 'provisioned_link_count': 2375, 'remote_location': 'remote_location_value', 'requested_link_count': 2151, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'state': 'state_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.patch(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_patch_rest_required_fields(request_type=compute.PatchInterconnectRequest):
    """Verify that `patch` sends required fields and drops unset/default ones."""
    transport_class = transports.InterconnectsRestTransport

    request_init = {}
    request_init["interconnect"] = ""
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["interconnect"] = 'interconnect_value'
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnect" in jsonified_request + assert jsonified_request["interconnect"] == 'interconnect_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnect", "interconnectResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchInterconnectRequest.pb(compute.PatchInterconnectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchInterconnectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchInterconnectRequest): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request_init["interconnect_resource"] = {'admin_enabled': True, 'circuit_infos': [{'customer_demarc_id': 'customer_demarc_id_value', 'google_circuit_id': 'google_circuit_id_value', 'google_demarc_id': 'google_demarc_id_value'}], 'creation_timestamp': 'creation_timestamp_value', 'customer_name': 'customer_name_value', 'description': 'description_value', 'expected_outages': [{'affected_circuits': ['affected_circuits_value1', 'affected_circuits_value2'], 'description': 'description_value', 'end_time': 837, 'issue_type': 'issue_type_value', 'name': 'name_value', 'source': 'source_value', 'start_time': 1084, 'state': 'state_value'}], 'google_ip_address': 'google_ip_address_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect_attachments': ['interconnect_attachments_value1', 'interconnect_attachments_value2'], 'interconnect_type': 'interconnect_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'link_type': 'link_type_value', 'location': 'location_value', 'name': 'name_value', 'noc_contact_email': 'noc_contact_email_value', 'operational_status': 'operational_status_value', 
'peer_ip_address': 'peer_ip_address_value', 'provisioned_link_count': 2375, 'remote_location': 'remote_location_value', 'requested_link_count': 2151, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'interconnect': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + interconnect='interconnect_value', + interconnect_resource=compute.Interconnect(admin_enabled=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects/{interconnect}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchInterconnectRequest(), + project='project_value', + interconnect='interconnect_value', + interconnect_resource=compute.Interconnect(admin_enabled=True), + ) + + +def test_patch_rest_error(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchInterconnectRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request_init["interconnect_resource"] = {'admin_enabled': True, 'circuit_infos': [{'customer_demarc_id': 'customer_demarc_id_value', 'google_circuit_id': 'google_circuit_id_value', 'google_demarc_id': 'google_demarc_id_value'}], 'creation_timestamp': 'creation_timestamp_value', 'customer_name': 'customer_name_value', 'description': 'description_value', 'expected_outages': [{'affected_circuits': ['affected_circuits_value1', 'affected_circuits_value2'], 'description': 'description_value', 'end_time': 837, 'issue_type': 'issue_type_value', 'name': 'name_value', 'source': 'source_value', 'start_time': 1084, 'state': 'state_value'}], 'google_ip_address': 'google_ip_address_value', 'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect_attachments': 
['interconnect_attachments_value1', 'interconnect_attachments_value2'], 'interconnect_type': 'interconnect_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'link_type': 'link_type_value', 'location': 'location_value', 'name': 'name_value', 'noc_contact_email': 'noc_contact_email_value', 'operational_status': 'operational_status_value', 'peer_ip_address': 'peer_ip_address_value', 'provisioned_link_count': 2375, 'remote_location': 'remote_location_value', 'requested_link_count': 2151, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish 
that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["interconnect"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["interconnect"] = 'interconnect_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "interconnect" in jsonified_request + assert jsonified_request["interconnect"] == 'interconnect_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("interconnect", "interconnectResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.InterconnectsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchInterconnectRequest.pb(compute.PatchInterconnectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchInterconnectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchInterconnectRequest): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'interconnect': 'sample2'} + request_init["interconnect_resource"] = {'admin_enabled': True, 'circuit_infos': [{'customer_demarc_id': 'customer_demarc_id_value', 'google_circuit_id': 'google_circuit_id_value', 'google_demarc_id': 'google_demarc_id_value'}], 'creation_timestamp': 'creation_timestamp_value', 'customer_name': 'customer_name_value', 'description': 'description_value', 'expected_outages': [{'affected_circuits': ['affected_circuits_value1', 'affected_circuits_value2'], 'description': 'description_value', 'end_time': 837, 'issue_type': 'issue_type_value', 'name': 'name_value', 'source': 'source_value', 'start_time': 1084, 'state': 'state_value'}], 'google_ip_address': 'google_ip_address_value', 
'google_reference_id': 'google_reference_id_value', 'id': 205, 'interconnect_attachments': ['interconnect_attachments_value1', 'interconnect_attachments_value2'], 'interconnect_type': 'interconnect_type_value', 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'link_type': 'link_type_value', 'location': 'location_value', 'name': 'name_value', 'noc_contact_email': 'noc_contact_email_value', 'operational_status': 'operational_status_value', 'peer_ip_address': 'peer_ip_address_value', 'provisioned_link_count': 2375, 'remote_location': 'remote_location_value', 'requested_link_count': 2151, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'interconnect': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + interconnect='interconnect_value', + interconnect_resource=compute.Interconnect(admin_enabled=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects/{interconnect}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchInterconnectRequest(), + project='project_value', + interconnect='interconnect_value', + interconnect_resource=compute.Interconnect(admin_enabled=True), + ) + + +def test_patch_unary_rest_error(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsInterconnectRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.InterconnectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.InterconnectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InterconnectsRestInterceptor(), + ) + client = InterconnectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.InterconnectsRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsInterconnectRequest.pb(compute.SetLabelsInterconnectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value 
def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsInterconnectRequest):
    """An HTTP 400 from the API must surface to the caller as BadRequest."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request whose path fields satisfy URI transcoding.
    payload = {'project': 'sample1', 'resource': 'sample2'}
    payload["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}}
    request = request_type(**payload)

    # Fake a 400 response from the underlying HTTP session.
    with mock.patch.object(Session, 'request') as http_call, pytest.raises(core_exceptions.BadRequest):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        http_call.return_value = bad_response
        client.set_labels(request)


def test_set_labels_rest_flattened():
    """Flattened keyword arguments are assembled into the expected HTTP call."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        # The faked API response body.
        return_value = compute.Operation()

        # Path params that satisfy the http rule, plus a truthy value for
        # each flattened field.
        sample_request = {'project': 'sample1', 'resource': 'sample2'}
        call_kwargs = dict(
            project='project_value',
            resource='resource_value',
            global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'),
        )
        call_kwargs.update(sample_request)

        ok_response = Response()
        ok_response.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        ok_response._content = json_format.MessageToJson(pb_return_value).encode('UTF-8')
        http_call.return_value = ok_response

        client.set_labels(**call_kwargs)

        # Exactly one HTTP call, against the expected URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects/{resource}/setLabels" % client.transport._host, call_args[1])


def test_set_labels_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields is rejected."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    with pytest.raises(ValueError):
        client.set_labels(
            compute.SetLabelsInterconnectRequest(),
            project='project_value',
            resource='resource_value',
            global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'),
        )


def test_set_labels_rest_error():
    """Smoke test: constructing a REST client for this method does not raise."""
    InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsInterconnectRequest): + transport_class = transports.InterconnectsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
def test_set_labels_unary_rest_unset_required_fields():
    """An empty request reports exactly set_labels' required fields as unset.

    Fix: the transport was constructed with the ``AnonymousCredentials``
    *class* instead of an instance (missing call parentheses); every other
    transport construction in this module instantiates the credentials.
    """
    transport = transports.InterconnectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    unset_fields = transport.set_labels._get_unset_required_fields({})
    # The intersection with the default-populated field set is empty for
    # this method, so no fields are expected back.
    assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", )))
def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsInterconnectRequest):
    """A 400 response surfaces as BadRequest from the unary variant as well."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request whose path fields satisfy URI transcoding.
    payload = {'project': 'sample1', 'resource': 'sample2'}
    payload["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}}
    request = request_type(**payload)

    # Fake a 400 response from the underlying HTTP session.
    with mock.patch.object(Session, 'request') as http_call, pytest.raises(core_exceptions.BadRequest):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        http_call.return_value = bad_response
        client.set_labels_unary(request)


def test_set_labels_unary_rest_flattened():
    """Flattened kwargs to the unary variant produce the expected HTTP call."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        # The faked API response body.
        return_value = compute.Operation()

        # Path params that satisfy the http rule, plus a truthy value for
        # each flattened field.
        sample_request = {'project': 'sample1', 'resource': 'sample2'}
        call_kwargs = dict(
            project='project_value',
            resource='resource_value',
            global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'),
        )
        call_kwargs.update(sample_request)

        ok_response = Response()
        ok_response.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        ok_response._content = json_format.MessageToJson(pb_return_value).encode('UTF-8')
        http_call.return_value = ok_response

        client.set_labels_unary(**call_kwargs)

        # Exactly one HTTP call, against the expected URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/interconnects/{resource}/setLabels" % client.transport._host, call_args[1])
def test_set_labels_unary_rest_error():
    """Smoke test: constructing a REST client for the unary method does not raise."""
    InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )
def test_transport_instance():
    """A client adopts a pre-built transport instance as-is."""
    rest_transport = transports.InterconnectsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = InterconnectsClient(transport=rest_transport)
    assert client.transport is rest_transport


@pytest.mark.parametrize("transport_class", [
    transports.InterconnectsRestTransport,
])
def test_transport_adc(transport_class):
    """Application Default Credentials are looked up when none are provided."""
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """transport.kind reports the registered transport name."""
    built = InterconnectsClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert built.kind == transport_name


def test_interconnects_base_transport_error():
    """Supplying both credentials and a credentials file is contradictory."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.InterconnectsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_interconnects_base_transport():
    """Every RPC and abstract property on the base transport raises NotImplementedError."""
    # Patch __init__ so the abstract base can be instantiated directly.
    with mock.patch('google.cloud.compute_v1.services.interconnects.transports.InterconnectsTransport.__init__') as Transport:
        Transport.return_value = None
        base = transports.InterconnectsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Each generated RPC stub must be abstract.
    for rpc_name in (
        'delete',
        'get',
        'get_diagnostics',
        'insert',
        'list',
        'patch',
        'set_labels',
    ):
        with pytest.raises(NotImplementedError):
            getattr(base, rpc_name)(request=object())

    with pytest.raises(NotImplementedError):
        base.close()

    # Catch-all for the remaining abstract properties.
    for leftover in [
        'kind',
    ]:
        with pytest.raises(NotImplementedError):
            getattr(base, leftover)()


def test_interconnects_base_transport_with_credentials_file():
    """A credentials file is loaded with the compute/cloud-platform default scopes."""
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.interconnects.transports.InterconnectsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.InterconnectsTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id="octopus",
        )


def test_interconnects_base_transport_with_adc():
    """ADC is consulted when neither credentials nor a credentials file are given."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.interconnects.transports.InterconnectsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.InterconnectsTransport()
        adc.assert_called_once()


def test_interconnects_auth_adc():
    """Client construction without credentials falls back to ADC with default scopes."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        InterconnectsClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id=None,
        )


def test_interconnects_http_transport_client_cert_source_for_mtls():
    """A client cert source is wired into the authorized session's mTLS channel."""
    anon_creds = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.InterconnectsRestTransport(
            credentials=anon_creds,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_interconnects_host_no_port(transport_name):
    """Without an explicit port, the REST host carries an https scheme and no port."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    expected_host = (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )
    assert client.transport._host == expected_host

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_interconnects_host_with_port(transport_name):
    """An explicit port on the endpoint is preserved in the transport host."""
    client = InterconnectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
        transport=transport_name,
    )
    expected_host = (
        'compute.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com:8000'
    )
    assert client.transport._host == expected_host
def test_common_billing_account_path():
    """The billing-account helper formats the canonical resource path."""
    billing_account = "squid"
    built = InterconnectsClient.common_billing_account_path(billing_account)
    assert built == "billingAccounts/{billing_account}".format(billing_account=billing_account, )


def test_parse_common_billing_account_path():
    """Billing-account path parsing inverts construction."""
    expected = {
        "billing_account": "clam",
    }
    path = InterconnectsClient.common_billing_account_path(**expected)
    assert InterconnectsClient.parse_common_billing_account_path(path) == expected

def test_common_folder_path():
    """The folder helper formats the canonical resource path."""
    folder = "whelk"
    built = InterconnectsClient.common_folder_path(folder)
    assert built == "folders/{folder}".format(folder=folder, )


def test_parse_common_folder_path():
    """Folder path parsing inverts construction."""
    expected = {
        "folder": "octopus",
    }
    path = InterconnectsClient.common_folder_path(**expected)
    assert InterconnectsClient.parse_common_folder_path(path) == expected

def test_common_organization_path():
    """The organization helper formats the canonical resource path."""
    organization = "oyster"
    built = InterconnectsClient.common_organization_path(organization)
    assert built == "organizations/{organization}".format(organization=organization, )


def test_parse_common_organization_path():
    """Organization path parsing inverts construction."""
    expected = {
        "organization": "nudibranch",
    }
    path = InterconnectsClient.common_organization_path(**expected)
    assert InterconnectsClient.parse_common_organization_path(path) == expected

def test_common_project_path():
    """The project helper formats the canonical resource path."""
    project = "cuttlefish"
    built = InterconnectsClient.common_project_path(project)
    assert built == "projects/{project}".format(project=project, )
def test_parse_common_project_path():
    """Project path parsing inverts construction."""
    expected = {
        "project": "mussel",
    }
    path = InterconnectsClient.common_project_path(**expected)
    assert InterconnectsClient.parse_common_project_path(path) == expected

def test_common_location_path():
    """The location helper formats the canonical resource path."""
    project = "winkle"
    location = "nautilus"
    built = InterconnectsClient.common_location_path(project, location)
    assert built == "projects/{project}/locations/{location}".format(project=project, location=location, )


def test_parse_common_location_path():
    """Location path parsing inverts construction."""
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = InterconnectsClient.common_location_path(**expected)
    assert InterconnectsClient.parse_common_location_path(path) == expected


def test_client_with_default_client_info():
    """The provided client_info is threaded through to _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Via the client constructor...
    with mock.patch.object(transports.InterconnectsTransport, '_prep_wrapped_messages') as prep:
        InterconnectsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # ...and via direct transport construction.
    with mock.patch.object(transports.InterconnectsTransport, '_prep_wrapped_messages') as prep:
        transport_class = InterconnectsClient.get_transport_class()
        transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    """Closing the client closes the underlying transport session exactly once."""
    # Map transport name to the attribute whose close() should fire.
    # (Local renamed from the original's `transports` to avoid shadowing
    # the imported `transports` module.)
    close_targets = {
        "rest": "_session",
    }

    for transport_name, close_attr in close_targets.items():
        client = InterconnectsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name
        )
        with mock.patch.object(type(getattr(client.transport, close_attr)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """The client context manager closes its transport on exit."""
    for transport_name in [
        'rest',
    ]:
        client = InterconnectsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name
        )
        # Entering/exiting the context must trigger the underlying close().
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()

@pytest.mark.parametrize("client_class,transport_class", [
    (InterconnectsClient, transports.InterconnectsRestTransport),
])
def test_api_key_credentials(client_class, transport_class):
    """An api_key in client options is exchanged for API-key credentials."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
def client_cert_source_callback():
    # Static stand-in for a real client-certificate source; the mTLS tests
    # only check that this callable is threaded through, not its contents.
    return b"cert bytes", b"key bytes"
def modify_default_endpoint(client):
    """Return a non-localhost endpoint so mTLS-endpoint derivation is exercisable.

    If the client's default endpoint mentions localhost, substitute a dummy
    googleapis.com host; otherwise pass the default through unchanged.
    """
    default = client.DEFAULT_ENDPOINT
    if "localhost" in default:
        return "foo.googleapis.com"
    return default


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint only rewrites *.googleapis.com hosts."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # None passes through; already-mtls endpoints are idempotent;
    # non-Google hosts are untouched.
    assert LicenseCodesClient._get_default_mtls_endpoint(None) is None
    assert LicenseCodesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert LicenseCodesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert LicenseCodesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert LicenseCodesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert LicenseCodesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (LicenseCodesClient, "rest"),
])
def test_license_codes_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info adopts the loaded credentials and REST host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        expected_host = (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )
        assert client.transport._host == expected_host
@pytest.mark.parametrize("client_class,transport_name", [
    (LicenseCodesClient, "rest"),
])
def test_license_codes_client_from_service_account_file(client_class, transport_name):
    """Both from_service_account_file and its _json alias adopt the loaded credentials."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds

        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # The _json spelling is an alias for the same constructor.
        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        expected_host = (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )
        assert client.transport._host == expected_host


def test_license_codes_client_get_transport_class():
    """The REST transport is both the default and the explicitly-named choice."""
    default_transport = LicenseCodesClient.get_transport_class()
    assert default_transport in [
        transports.LicenseCodesRestTransport,
    ]

    named_transport = LicenseCodesClient.get_transport_class("rest")
    assert named_transport == transports.LicenseCodesRestTransport
test_license_codes_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LicenseCodesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LicenseCodesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (LicenseCodesClient, transports.LicenseCodesRestTransport, "rest", "true"), + (LicenseCodesClient, transports.LicenseCodesRestTransport, "rest", "false"), +]) +@mock.patch.object(LicenseCodesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LicenseCodesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_license_codes_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + LicenseCodesClient +]) +@mock.patch.object(LicenseCodesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LicenseCodesClient)) +def test_license_codes_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LicenseCodesClient, transports.LicenseCodesRestTransport, "rest"), +]) +def test_license_codes_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LicenseCodesClient, transports.LicenseCodesRestTransport, "rest", None), +]) +def test_license_codes_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetLicenseCodeRequest, + dict, +]) +def test_get_rest(request_type): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'license_code': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.LicenseCode( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + self_link='self_link_value', + state='state_value', + transferable=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.LicenseCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.LicenseCode) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + assert response.state == 'state_value' + assert response.transferable is True + + +def test_get_rest_required_fields(request_type=compute.GetLicenseCodeRequest): + transport_class = transports.LicenseCodesRestTransport + + request_init = {} + request_init["license_code"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["licenseCode"] = 'license_code_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "licenseCode" in jsonified_request + assert jsonified_request["licenseCode"] == 'license_code_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.LicenseCode() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.LicenseCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.LicenseCodesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("licenseCode", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.LicenseCodesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LicenseCodesRestInterceptor(), + ) + client = LicenseCodesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LicenseCodesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.LicenseCodesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetLicenseCodeRequest.pb(compute.GetLicenseCodeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.LicenseCode.to_json(compute.LicenseCode()) + + request = compute.GetLicenseCodeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.LicenseCode() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetLicenseCodeRequest): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'license_code': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.LicenseCode() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'license_code': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + license_code='license_code_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.LicenseCode.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenseCodes/{license_code}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetLicenseCodeRequest(), + project='project_value', + license_code='license_code_value', + ) + + +def test_get_rest_error(): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsLicenseCodeRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsLicenseCodeRequest): + transport_class = transports.LicenseCodesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.LicenseCodesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.LicenseCodesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LicenseCodesRestInterceptor(), + ) + client = LicenseCodesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.LicenseCodesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.LicenseCodesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsLicenseCodeRequest.pb(compute.TestIamPermissionsLicenseCodeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsLicenseCodeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsLicenseCodeRequest): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenseCodes/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsLicenseCodeRequest(), + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LicenseCodesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LicenseCodesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseCodesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.LicenseCodesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicenseCodesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicenseCodesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LicenseCodesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicenseCodesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LicenseCodesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LicenseCodesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.LicenseCodesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = LicenseCodesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_license_codes_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LicenseCodesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_license_codes_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.license_codes.transports.LicenseCodesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.LicenseCodesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'get', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_license_codes_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.license_codes.transports.LicenseCodesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicenseCodesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_license_codes_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.license_codes.transports.LicenseCodesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicenseCodesTransport() + adc.assert_called_once() + + +def test_license_codes_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LicenseCodesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_license_codes_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.LicenseCodesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_license_codes_host_no_port(transport_name): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_license_codes_host_with_port(transport_name): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_license_codes_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LicenseCodesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LicenseCodesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LicenseCodesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = LicenseCodesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseCodesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = LicenseCodesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = LicenseCodesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicenseCodesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LicenseCodesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = LicenseCodesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseCodesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = LicenseCodesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = LicenseCodesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = LicenseCodesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LicenseCodesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = LicenseCodesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicenseCodesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LicenseCodesTransport, '_prep_wrapped_messages') as prep: + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LicenseCodesTransport, '_prep_wrapped_messages') as prep: + transport_class = LicenseCodesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (LicenseCodesClient, transports.LicenseCodesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_licenses.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_licenses.py new file mode 100644 index 000000000..6ccd108c5 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_licenses.py @@ -0,0 +1,3242 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.licenses import LicensesClient +from google.cloud.compute_v1.services.licenses import pagers +from google.cloud.compute_v1.services.licenses import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LicensesClient._get_default_mtls_endpoint(None) is None + assert LicensesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert LicensesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert LicensesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert LicensesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert LicensesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LicensesClient, "rest"), +]) +def test_licenses_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.LicensesRestTransport, "rest"), +]) +def test_licenses_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LicensesClient, "rest"), +]) +def test_licenses_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_licenses_client_get_transport_class(): + transport = LicensesClient.get_transport_class() + available_transports = [ + transports.LicensesRestTransport, + ] + assert transport in available_transports + + transport = LicensesClient.get_transport_class("rest") + assert transport == transports.LicensesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LicensesClient, transports.LicensesRestTransport, "rest"), +]) +@mock.patch.object(LicensesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LicensesClient)) +def test_licenses_client_client_options(client_class, transport_class, transport_name): + # 
Check that if channel is provided we won't create a new one. + with mock.patch.object(LicensesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LicensesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (LicensesClient, transports.LicensesRestTransport, "rest", "true"), + (LicensesClient, transports.LicensesRestTransport, "rest", "false"), +]) +@mock.patch.object(LicensesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LicensesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_licenses_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + LicensesClient +]) +@mock.patch.object(LicensesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LicensesClient)) +def test_licenses_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LicensesClient, transports.LicensesRestTransport, "rest"), +]) +def test_licenses_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LicensesClient, transports.LicensesRestTransport, "rest", None), +]) +def test_licenses_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteLicenseRequest, + dict, +]) +def test_delete_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'license_': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteLicenseRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["license_"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "license" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "license" in 
jsonified_request + assert jsonified_request["license"] == request_init["license_"] + + jsonified_request["license"] = 'license__value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("license_", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "license" in jsonified_request + assert jsonified_request["license"] == 'license__value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ( + "license", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("license", "requestId", )) & set(("license", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LicensesRestInterceptor(), + ) + client = LicensesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LicensesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.LicensesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteLicenseRequest.pb(compute.DeleteLicenseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteLicenseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteLicenseRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'license_': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'license_': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + license_='license__value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses/{license_}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteLicenseRequest(), + project='project_value', + license_='license__value', + ) + + +def test_delete_rest_error(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteLicenseRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'license_': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteLicenseRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["license_"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "license" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "license" in jsonified_request + assert jsonified_request["license"] == request_init["license_"] + + jsonified_request["license"] = 'license__value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("license_", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "license" in jsonified_request + assert jsonified_request["license"] == 'license__value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ( + "license", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("license", "requestId", )) & set(("license", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LicensesRestInterceptor(), + ) + client = LicensesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.LicensesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.LicensesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteLicenseRequest.pb(compute.DeleteLicenseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteLicenseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteLicenseRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'license_': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'license_': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + license_='license__value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses/{license_}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteLicenseRequest(), + project='project_value', + license_='license__value', + ) + + +def test_delete_unary_rest_error(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetLicenseRequest, + dict, +]) +def test_get_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'license_': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.License( + charges_use_fee=True, + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + license_code=1245, + name='name_value', + self_link='self_link_value', + transferable=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.License.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.License) + assert response.charges_use_fee is True + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.license_code == 1245 + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + assert response.transferable is True + + +def test_get_rest_required_fields(request_type=compute.GetLicenseRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["license_"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "license" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "license" in jsonified_request + assert jsonified_request["license"] == request_init["license_"] + + jsonified_request["license"] = 'license__value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("license_", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "license" in jsonified_request + assert jsonified_request["license"] == 'license__value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.License() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.License.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ( + "license", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(("license", )) & set(("license", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LicensesRestInterceptor(), + ) + client = LicensesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LicensesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.LicensesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetLicenseRequest.pb(compute.GetLicenseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.License.to_json(compute.License()) + + request = compute.GetLicenseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.License() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetLicenseRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'license_': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.License() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'license_': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + license_='license__value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.License.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses/{license_}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetLicenseRequest(), + project='project_value', + license_='license__value', + ) + + +def test_get_rest_error(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyLicenseRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyLicenseRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LicensesRestInterceptor(), + ) + client = LicensesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LicensesRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.LicensesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyLicenseRequest.pb(compute.GetIamPolicyLicenseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyLicenseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyLicenseRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyLicenseRequest(), + project='project_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertLicenseRequest, + dict, +]) +def test_insert_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["license_resource"] = {'charges_use_fee': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'license_code': 1245, 'name': 'name_value', 'resource_requirements': {'min_guest_cpu_count': 2042, 'min_memory_mb': 1386}, 'self_link': 'self_link_value', 'transferable': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertLicenseRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = compute.Operation.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.insert(request)
+
+ expected_params = [
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_insert_rest_unset_required_fields():
+ transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.insert._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("requestId", )) & set(("licenseResource", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_insert_rest_interceptors(null_interceptor):
+ transport = transports.LicensesRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LicensesRestInterceptor(),
+ )
+ client = LicensesClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LicensesRestInterceptor, "post_insert") as post, \
+ mock.patch.object(transports.LicensesRestInterceptor, "pre_insert") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = compute.InsertLicenseRequest.pb(compute.InsertLicenseRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertLicenseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertLicenseRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["license_resource"] = {'charges_use_fee': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'license_code': 1245, 'name': 'name_value', 'resource_requirements': {'min_guest_cpu_count': 2042, 'min_memory_mb': 1386}, 'self_link': 'self_link_value', 'transferable': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + license_resource=compute.License(charges_use_fee=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertLicenseRequest(), + project='project_value', + license_resource=compute.License(charges_use_fee=True), + ) + + +def test_insert_rest_error(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertLicenseRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["license_resource"] = {'charges_use_fee': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'license_code': 1245, 'name': 'name_value', 'resource_requirements': {'min_guest_cpu_count': 2042, 'min_memory_mb': 1386}, 'self_link': 'self_link_value', 'transferable': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertLicenseRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = compute.Operation.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.insert_unary(request)
+
+ expected_params = [
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_insert_unary_rest_unset_required_fields():
+ transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.insert._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("requestId", )) & set(("licenseResource", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_insert_unary_rest_interceptors(null_interceptor):
+ transport = transports.LicensesRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LicensesRestInterceptor(),
+ )
+ client = LicensesClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LicensesRestInterceptor, "post_insert") as post, \
+ mock.patch.object(transports.LicensesRestInterceptor, "pre_insert") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = compute.InsertLicenseRequest.pb(compute.InsertLicenseRequest())
+ transcode.return_value = {
+ 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertLicenseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertLicenseRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["license_resource"] = {'charges_use_fee': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'license_code': 1245, 'name': 'name_value', 'resource_requirements': {'min_guest_cpu_count': 2042, 'min_memory_mb': 1386}, 'self_link': 'self_link_value', 'transferable': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + license_resource=compute.License(charges_use_fee=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertLicenseRequest(), + project='project_value', + license_resource=compute.License(charges_use_fee=True), + ) + + +def test_insert_unary_rest_error(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListLicensesRequest, + dict, +]) +def test_list_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.LicensesListResponse( + id='id_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.LicensesListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListLicensesRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.LicensesListResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = compute.LicensesListResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.list(request)
+
+ expected_params = [
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_rest_unset_required_fields():
+ transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.list._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_rest_interceptors(null_interceptor):
+ transport = transports.LicensesRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LicensesRestInterceptor(),
+ )
+ client = LicensesClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LicensesRestInterceptor, "post_list") as post, \
+ 
mock.patch.object(transports.LicensesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListLicensesRequest.pb(compute.ListLicensesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.LicensesListResponse.to_json(compute.LicensesListResponse()) + + request = compute.ListLicensesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.LicensesListResponse() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListLicensesRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.LicensesListResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.LicensesListResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListLicensesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.LicensesListResponse( + items=[ + compute.License(), + compute.License(), + compute.License(), + ], + next_page_token='abc', + ), + compute.LicensesListResponse( + items=[], + next_page_token='def', + ), + compute.LicensesListResponse( + items=[ + compute.License(), + ], + next_page_token='ghi', + ), + compute.LicensesListResponse( + items=[ + compute.License(), + compute.License(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.LicensesListResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.License) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyLicenseRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 
'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyLicenseRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = compute.Policy.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+
+ response = client.set_iam_policy(request)
+
+ expected_params = [
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_set_iam_policy_rest_unset_required_fields():
+ transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.set_iam_policy._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("globalSetPolicyRequestResource", "project", "resource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_iam_policy_rest_interceptors(null_interceptor):
+ transport = transports.LicensesRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.LicensesRestInterceptor(),
+ )
+ client = LicensesClient(transport=transport)
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.LicensesRestInterceptor, 
"post_set_iam_policy") as post, \ + mock.patch.object(transports.LicensesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyLicenseRequest.pb(compute.SetIamPolicyLicenseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyLicenseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyLicenseRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 
'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyLicenseRequest(), + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsLicenseRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsLicenseRequest): + transport_class = transports.LicensesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.TestPermissionsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.test_iam_permissions(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_test_iam_permissions_rest_unset_required_fields():
+    transport = transports.LicensesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.test_iam_permissions._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_test_iam_permissions_rest_interceptors(null_interceptor):
+    transport = transports.LicensesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.LicensesRestInterceptor(),
+        )
+    client = LicensesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+
mock.patch.object(transports.LicensesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.LicensesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsLicenseRequest.pb(compute.TestIamPermissionsLicenseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsLicenseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsLicenseRequest): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/licenses/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsLicenseRequest(), + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicensesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicensesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LicensesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LicensesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LicensesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LicensesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.LicensesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = LicensesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_licenses_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LicensesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_licenses_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.licenses.transports.LicensesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.LicensesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_licenses_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.licenses.transports.LicensesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicensesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_licenses_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.licenses.transports.LicensesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LicensesTransport() + adc.assert_called_once() + + +def test_licenses_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LicensesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_licenses_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.LicensesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_licenses_host_no_port(transport_name): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_licenses_host_with_port(transport_name): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_licenses_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = 
LicensesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LicensesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LicensesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = LicensesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicensesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = LicensesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = LicensesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LicensesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LicensesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = LicensesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = LicensesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = LicensesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = LicensesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LicensesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LicensesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = LicensesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = LicensesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LicensesTransport, '_prep_wrapped_messages') as prep: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LicensesTransport, '_prep_wrapped_messages') as prep: + transport_class = LicensesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (LicensesClient, transports.LicensesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_machine_images.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_machine_images.py new file mode 100644 index 000000000..aadbfdb03 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_machine_images.py @@ -0,0 +1,3227 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.machine_images import MachineImagesClient +from google.cloud.compute_v1.services.machine_images import pagers +from google.cloud.compute_v1.services.machine_images import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MachineImagesClient._get_default_mtls_endpoint(None) is None + assert MachineImagesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MachineImagesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MachineImagesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MachineImagesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MachineImagesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (MachineImagesClient, "rest"), +]) +def test_machine_images_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.MachineImagesRestTransport, "rest"), +]) +def test_machine_images_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (MachineImagesClient, "rest"), +]) +def test_machine_images_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_machine_images_client_get_transport_class(): + transport = MachineImagesClient.get_transport_class() + available_transports = [ + transports.MachineImagesRestTransport, + ] + assert transport in available_transports + + transport = MachineImagesClient.get_transport_class("rest") + assert transport == transports.MachineImagesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MachineImagesClient, transports.MachineImagesRestTransport, "rest"), +]) +@mock.patch.object(MachineImagesClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(MachineImagesClient)) +def test_machine_images_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MachineImagesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MachineImagesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MachineImagesClient, transports.MachineImagesRestTransport, "rest", "true"), + (MachineImagesClient, transports.MachineImagesRestTransport, "rest", "false"), +]) +@mock.patch.object(MachineImagesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MachineImagesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_machine_images_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + MachineImagesClient +]) +@mock.patch.object(MachineImagesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MachineImagesClient)) +def test_machine_images_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MachineImagesClient, transports.MachineImagesRestTransport, "rest"), +]) +def test_machine_images_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MachineImagesClient, transports.MachineImagesRestTransport, "rest", None), +]) +def test_machine_images_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteMachineImageRequest, + dict, +]) +def test_delete_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'machine_image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteMachineImageRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["machine_image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["machineImage"] = 'machine_image_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "machineImage" in jsonified_request + assert jsonified_request["machineImage"] == 'machine_image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("machineImage", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteMachineImageRequest.pb(compute.DeleteMachineImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteMachineImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteMachineImageRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'machine_image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'machine_image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + machine_image='machine_image_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages/{machine_image}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteMachineImageRequest(), + project='project_value', + machine_image='machine_image_value', + ) + + +def test_delete_rest_error(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteMachineImageRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'machine_image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + 
response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteMachineImageRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["machine_image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["machineImage"] = 'machine_image_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "machineImage" in jsonified_request + assert jsonified_request["machineImage"] == 'machine_image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("machineImage", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.MachineImagesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteMachineImageRequest.pb(compute.DeleteMachineImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteMachineImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteMachineImageRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'machine_image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'machine_image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + machine_image='machine_image_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages/{machine_image}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteMachineImageRequest(), + project='project_value', + machine_image='machine_image_value', + ) + + +def test_delete_unary_rest_error(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetMachineImageRequest, + dict, +]) +def test_get_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'machine_image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.MachineImage( + creation_timestamp='creation_timestamp_value', + description='description_value', + guest_flush=True, + id=205, + kind='kind_value', + name='name_value', + satisfies_pzs=True, + self_link='self_link_value', + source_instance='source_instance_value', + status='status_value', + storage_locations=['storage_locations_value'], + total_storage_bytes=2046, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineImage.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.MachineImage) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.guest_flush is True + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.source_instance == 'source_instance_value' + assert response.status == 'status_value' + assert response.storage_locations == ['storage_locations_value'] + assert response.total_storage_bytes == 2046 + + +def test_get_rest_required_fields(request_type=compute.GetMachineImageRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["machine_image"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["machineImage"] = 'machine_image_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "machineImage" in jsonified_request + assert jsonified_request["machineImage"] == 'machine_image_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = MachineImagesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.MachineImage() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.MachineImage.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("machineImage", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetMachineImageRequest.pb(compute.GetMachineImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.MachineImage.to_json(compute.MachineImage()) + + request = compute.GetMachineImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.MachineImage() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetMachineImageRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'machine_image': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.MachineImage() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'machine_image': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + machine_image='machine_image_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineImage.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages/{machine_image}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetMachineImageRequest(), + project='project_value', + machine_image='machine_image_value', + ) + + +def test_get_rest_error(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyMachineImageRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyMachineImageRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "post_get_iam_policy") as 
post, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyMachineImageRequest.pb(compute.GetIamPolicyMachineImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyMachineImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyMachineImageRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyMachineImageRequest(), + project='project_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertMachineImageRequest, + dict, +]) +def test_insert_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["machine_image_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'guest_flush': True, 'id': 205, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 
'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 
'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'kind': 'kind_value', 'machine_image_encryption_key': {}, 'name': 'name_value', 'satisfies_pzs': True, 'saved_disks': [{'architecture': 'architecture_value', 'kind': 'kind_value', 'source_disk': 'source_disk_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value'}], 'self_link': 'self_link_value', 'source_disk_encryption_keys': [{'disk_encryption_key': {}, 'source_disk': 'source_disk_value'}], 'source_instance': 'source_instance_value', 'source_instance_properties': {'can_ip_forward': True, 'deletion_protection': True, 'description': 'description_value', 'disks': [{'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {}, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'guest_os_features': {}, 'index': 536, 'interface': 'interface_value', 'kind': 'kind_value', 
'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'source': 'source_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'type_': 'type__value'}], 'guest_accelerators': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': {}, 'scheduling': {}, 'service_accounts': {}, 'tags': {}}, 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2'], 'total_storage_bytes': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # 
Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertMachineImageRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_instance", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceInstance", )) & set(("machineImageResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertMachineImageRequest.pb(compute.InsertMachineImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertMachineImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertMachineImageRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["machine_image_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'guest_flush': True, 'id': 205, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 
'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 
'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'kind': 'kind_value', 'machine_image_encryption_key': {}, 'name': 'name_value', 'satisfies_pzs': True, 'saved_disks': [{'architecture': 'architecture_value', 'kind': 'kind_value', 'source_disk': 'source_disk_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value'}], 'self_link': 'self_link_value', 'source_disk_encryption_keys': [{'disk_encryption_key': {}, 'source_disk': 'source_disk_value'}], 'source_instance': 'source_instance_value', 'source_instance_properties': {'can_ip_forward': True, 'deletion_protection': True, 'description': 'description_value', 'disks': [{'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {}, 
'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'guest_os_features': {}, 'index': 536, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'source': 'source_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'type_': 'type__value'}], 'guest_accelerators': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': {}, 'scheduling': {}, 'service_accounts': {}, 'tags': {}}, 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2'], 'total_storage_bytes': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + machine_image_resource=compute.MachineImage(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertMachineImageRequest(), + project='project_value', + machine_image_resource=compute.MachineImage(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertMachineImageRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["machine_image_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'guest_flush': True, 'id': 205, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': 
['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 
'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'kind': 'kind_value', 'machine_image_encryption_key': {}, 'name': 'name_value', 'satisfies_pzs': True, 'saved_disks': [{'architecture': 'architecture_value', 'kind': 'kind_value', 'source_disk': 'source_disk_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value'}], 'self_link': 'self_link_value', 'source_disk_encryption_keys': [{'disk_encryption_key': {}, 'source_disk': 'source_disk_value'}], 'source_instance': 'source_instance_value', 'source_instance_properties': {'can_ip_forward': True, 'deletion_protection': True, 'description': 'description_value', 'disks': [{'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {}, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'guest_os_features': {}, 'index': 536, 
'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'source': 'source_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'type_': 'type__value'}], 'guest_accelerators': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': {}, 'scheduling': {}, 'service_accounts': {}, 'tags': {}}, 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2'], 'total_storage_bytes': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertMachineImageRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_instance", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceInstance", )) & set(("machineImageResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertMachineImageRequest.pb(compute.InsertMachineImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertMachineImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertMachineImageRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["machine_image_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'guest_flush': True, 'id': 205, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': 
{'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 
'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'kind': 'kind_value', 'machine_image_encryption_key': {}, 'name': 'name_value', 'satisfies_pzs': True, 'saved_disks': [{'architecture': 'architecture_value', 'kind': 'kind_value', 'source_disk': 'source_disk_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value'}], 'self_link': 'self_link_value', 'source_disk_encryption_keys': [{'disk_encryption_key': {}, 'source_disk': 'source_disk_value'}], 'source_instance': 
'source_instance_value', 'source_instance_properties': {'can_ip_forward': True, 'deletion_protection': True, 'description': 'description_value', 'disks': [{'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {}, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'guest_os_features': {}, 'index': 536, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'source': 'source_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'type_': 'type__value'}], 'guest_accelerators': {}, 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': {}, 'scheduling': {}, 'service_accounts': {}, 'tags': {}}, 'status': 'status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2'], 'total_storage_bytes': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + machine_image_resource=compute.MachineImage(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertMachineImageRequest(), + project='project_value', + machine_image_resource=compute.MachineImage(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListMachineImagesRequest, + dict, +]) +def test_list_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.MachineImageList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineImageList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListMachineImagesRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.MachineImageList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.MachineImageList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "post_list") as post, \ + 
mock.patch.object(transports.MachineImagesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListMachineImagesRequest.pb(compute.ListMachineImagesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.MachineImageList.to_json(compute.MachineImageList()) + + request = compute.ListMachineImagesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.MachineImageList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListMachineImagesRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.MachineImageList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineImageList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListMachineImagesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.MachineImageList( + items=[ + compute.MachineImage(), + compute.MachineImage(), + compute.MachineImage(), + ], + next_page_token='abc', + ), + compute.MachineImageList( + items=[], + next_page_token='def', + ), + compute.MachineImageList( + items=[ + compute.MachineImage(), + ], + next_page_token='ghi', + ), + compute.MachineImageList( + items=[ + compute.MachineImage(), + compute.MachineImage(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.MachineImageList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.MachineImage) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyMachineImageRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 
'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyMachineImageRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetPolicyRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.MachineImagesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyMachineImageRequest.pb(compute.SetIamPolicyMachineImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyMachineImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyMachineImageRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 
'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyMachineImageRequest(), + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsMachineImageRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsMachineImageRequest): + transport_class = transports.MachineImagesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.MachineImagesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineImagesRestInterceptor(), + ) + client = MachineImagesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.MachineImagesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.MachineImagesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsMachineImageRequest.pb(compute.TestIamPermissionsMachineImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsMachineImageRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsMachineImageRequest): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/machineImages/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsMachineImageRequest(), + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MachineImagesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MachineImagesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MachineImagesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MachineImagesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MachineImagesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MachineImagesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.MachineImagesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = MachineImagesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_machine_images_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MachineImagesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_machine_images_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.machine_images.transports.MachineImagesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.MachineImagesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_machine_images_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.machine_images.transports.MachineImagesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MachineImagesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_machine_images_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.machine_images.transports.MachineImagesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MachineImagesTransport() + adc.assert_called_once() + + +def test_machine_images_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MachineImagesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_machine_images_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.MachineImagesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_machine_images_host_no_port(transport_name): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_machine_images_host_with_port(transport_name): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_machine_images_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = MachineImagesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MachineImagesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MachineImagesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = MachineImagesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MachineImagesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = MachineImagesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = MachineImagesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MachineImagesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MachineImagesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = MachineImagesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MachineImagesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = MachineImagesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = MachineImagesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MachineImagesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MachineImagesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = MachineImagesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MachineImagesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MachineImagesTransport, '_prep_wrapped_messages') as prep: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MachineImagesTransport, '_prep_wrapped_messages') as prep: + transport_class = MachineImagesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying 
transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MachineImagesClient, transports.MachineImagesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_machine_types.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_machine_types.py new file mode 100644 index 000000000..485bd0848 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_machine_types.py @@ -0,0 +1,1716 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.machine_types import MachineTypesClient +from google.cloud.compute_v1.services.machine_types import pagers +from google.cloud.compute_v1.services.machine_types import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MachineTypesClient._get_default_mtls_endpoint(None) is None + assert MachineTypesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MachineTypesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MachineTypesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MachineTypesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MachineTypesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (MachineTypesClient, "rest"), +]) +def test_machine_types_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.MachineTypesRestTransport, "rest"), +]) +def test_machine_types_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', 
create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (MachineTypesClient, "rest"), +]) +def test_machine_types_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_machine_types_client_get_transport_class(): + transport = MachineTypesClient.get_transport_class() + available_transports = [ + transports.MachineTypesRestTransport, + ] + assert transport in available_transports + + transport = MachineTypesClient.get_transport_class("rest") + assert transport == transports.MachineTypesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MachineTypesClient, transports.MachineTypesRestTransport, "rest"), +]) +@mock.patch.object(MachineTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MachineTypesClient)) +def 
test_machine_types_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MachineTypesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MachineTypesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MachineTypesClient, transports.MachineTypesRestTransport, "rest", "true"), + (MachineTypesClient, transports.MachineTypesRestTransport, "rest", "false"), +]) +@mock.patch.object(MachineTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MachineTypesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_machine_types_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + MachineTypesClient +]) +@mock.patch.object(MachineTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MachineTypesClient)) +def test_machine_types_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MachineTypesClient, transports.MachineTypesRestTransport, "rest"), +]) +def test_machine_types_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MachineTypesClient, transports.MachineTypesRestTransport, "rest", None), +]) +def test_machine_types_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListMachineTypesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.MachineTypeAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListMachineTypesRequest): + transport_class = transports.MachineTypesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.MachineTypeAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.MachineTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.MachineTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.MachineTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineTypesRestInterceptor(), + ) + client = MachineTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineTypesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.MachineTypesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListMachineTypesRequest.pb(compute.AggregatedListMachineTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.MachineTypeAggregatedList.to_json(compute.MachineTypeAggregatedList()) + + request = compute.AggregatedListMachineTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.MachineTypeAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListMachineTypesRequest): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.MachineTypeAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/machineTypes" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListMachineTypesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.MachineTypeAggregatedList( + items={ + 'a':compute.MachineTypesScopedList(), + 'b':compute.MachineTypesScopedList(), + 'c':compute.MachineTypesScopedList(), + }, + next_page_token='abc', + ), + compute.MachineTypeAggregatedList( + items={}, + next_page_token='def', + ), + compute.MachineTypeAggregatedList( + items={ + 'g':compute.MachineTypesScopedList(), + }, + next_page_token='ghi', + ), + compute.MachineTypeAggregatedList( + items={ + 'h':compute.MachineTypesScopedList(), + 'i':compute.MachineTypesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.MachineTypeAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.MachineTypesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.MachineTypesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.MachineTypesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.GetMachineTypeRequest, + dict, +]) +def test_get_rest(request_type): + client = MachineTypesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'machine_type': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.MachineType( + creation_timestamp='creation_timestamp_value', + description='description_value', + guest_cpus=1090, + id=205, + image_space_gb=1430, + is_shared_cpu=True, + kind='kind_value', + maximum_persistent_disks=2603, + maximum_persistent_disks_size_gb=3437, + memory_mb=967, + name='name_value', + self_link='self_link_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.MachineType) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.guest_cpus == 1090 + assert response.id == 205 + assert response.image_space_gb == 1430 + assert response.is_shared_cpu is True + assert response.kind == 'kind_value' + assert response.maximum_persistent_disks == 2603 + assert response.maximum_persistent_disks_size_gb == 3437 + assert response.memory_mb == 967 + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetMachineTypeRequest): + transport_class = transports.MachineTypesRestTransport + + request_init = {} + request_init["machine_type"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["machineType"] = 'machine_type_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "machineType" in jsonified_request + assert jsonified_request["machineType"] == 'machine_type_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 
'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.MachineType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.MachineType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.MachineTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("machineType", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.MachineTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + 
interceptor=None if null_interceptor else transports.MachineTypesRestInterceptor(), + ) + client = MachineTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MachineTypesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.MachineTypesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetMachineTypeRequest.pb(compute.GetMachineTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.MachineType.to_json(compute.MachineType()) + + request = compute.GetMachineTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.MachineType() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetMachineTypeRequest): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'machine_type': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.MachineType() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'machine_type': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + machine_type='machine_type_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/machineTypes/{machine_type}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetMachineTypeRequest(), + project='project_value', + zone='zone_value', + machine_type='machine_type_value', + ) + + +def test_get_rest_error(): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListMachineTypesRequest, + dict, +]) +def test_list_rest(request_type): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.MachineTypeList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListMachineTypesRequest): + transport_class = transports.MachineTypesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.MachineTypeList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.MachineTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.MachineTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.MachineTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MachineTypesRestInterceptor(), + ) + client = MachineTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.MachineTypesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.MachineTypesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListMachineTypesRequest.pb(compute.ListMachineTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.MachineTypeList.to_json(compute.MachineTypeList()) + + request = compute.ListMachineTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.MachineTypeList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListMachineTypesRequest): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.MachineTypeList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.MachineTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/machineTypes" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListMachineTypesRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.MachineTypeList( + items=[ + compute.MachineType(), + compute.MachineType(), + compute.MachineType(), + ], + next_page_token='abc', + ), + compute.MachineTypeList( + items=[], + next_page_token='def', + ), + compute.MachineTypeList( + items=[ + compute.MachineType(), + ], + next_page_token='ghi', + ), + compute.MachineTypeList( + items=[ + compute.MachineType(), + compute.MachineType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.MachineTypeList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.MachineType) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MachineTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.MachineTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MachineTypesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MachineTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MachineTypesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MachineTypesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MachineTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MachineTypesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MachineTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MachineTypesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.MachineTypesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = MachineTypesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_machine_types_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MachineTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_machine_types_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.machine_types.transports.MachineTypesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.MachineTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'get', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_machine_types_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.machine_types.transports.MachineTypesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MachineTypesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_machine_types_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.machine_types.transports.MachineTypesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MachineTypesTransport() + adc.assert_called_once() + + +def test_machine_types_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MachineTypesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_machine_types_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.MachineTypesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_machine_types_host_no_port(transport_name): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_machine_types_host_with_port(transport_name): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_machine_types_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MachineTypesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MachineTypesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MachineTypesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = MachineTypesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MachineTypesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = MachineTypesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = MachineTypesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MachineTypesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MachineTypesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = MachineTypesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MachineTypesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = MachineTypesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = MachineTypesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MachineTypesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MachineTypesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = MachineTypesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MachineTypesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MachineTypesTransport, '_prep_wrapped_messages') as prep: + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MachineTypesTransport, '_prep_wrapped_messages') as prep: + transport_class = MachineTypesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MachineTypesClient, transports.MachineTypesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_attachments.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_attachments.py new file mode 100644 index 000000000..eb04d9df3 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_attachments.py @@ -0,0 +1,3594 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.network_attachments import NetworkAttachmentsClient +from google.cloud.compute_v1.services.network_attachments import pagers +from google.cloud.compute_v1.services.network_attachments import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NetworkAttachmentsClient._get_default_mtls_endpoint(None) is None + assert NetworkAttachmentsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert NetworkAttachmentsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert NetworkAttachmentsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert NetworkAttachmentsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert NetworkAttachmentsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworkAttachmentsClient, "rest"), +]) +def test_network_attachments_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.NetworkAttachmentsRestTransport, "rest"), +]) +def test_network_attachments_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworkAttachmentsClient, "rest"), +]) +def test_network_attachments_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_network_attachments_client_get_transport_class(): + transport = NetworkAttachmentsClient.get_transport_class() + available_transports = [ + transports.NetworkAttachmentsRestTransport, + ] + assert transport in available_transports + + transport = NetworkAttachmentsClient.get_transport_class("rest") + assert transport == transports.NetworkAttachmentsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworkAttachmentsClient, transports.NetworkAttachmentsRestTransport, 
"rest"), +]) +@mock.patch.object(NetworkAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkAttachmentsClient)) +def test_network_attachments_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NetworkAttachmentsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NetworkAttachmentsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NetworkAttachmentsClient, transports.NetworkAttachmentsRestTransport, "rest", "true"), + (NetworkAttachmentsClient, transports.NetworkAttachmentsRestTransport, "rest", "false"), +]) +@mock.patch.object(NetworkAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkAttachmentsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_network_attachments_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the 
endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + NetworkAttachmentsClient +]) +@mock.patch.object(NetworkAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkAttachmentsClient)) +def test_network_attachments_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworkAttachmentsClient, transports.NetworkAttachmentsRestTransport, "rest"), +]) +def test_network_attachments_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (NetworkAttachmentsClient, transports.NetworkAttachmentsRestTransport, "rest", None), +]) +def test_network_attachments_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListNetworkAttachmentsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkAttachmentAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListNetworkAttachmentsRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that 
path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkAttachmentAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListNetworkAttachmentsRequest.pb(compute.AggregatedListNetworkAttachmentsRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkAttachmentAggregatedList.to_json(compute.NetworkAttachmentAggregatedList()) + + request = compute.AggregatedListNetworkAttachmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkAttachmentAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListNetworkAttachmentsRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkAttachmentAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/networkAttachments" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListNetworkAttachmentsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkAttachmentAggregatedList( + items={ + 'a':compute.NetworkAttachmentsScopedList(), + 'b':compute.NetworkAttachmentsScopedList(), + 'c':compute.NetworkAttachmentsScopedList(), + }, + next_page_token='abc', + ), + compute.NetworkAttachmentAggregatedList( + items={}, + next_page_token='def', + ), + compute.NetworkAttachmentAggregatedList( + items={ + 'g':compute.NetworkAttachmentsScopedList(), + }, + next_page_token='ghi', + ), + compute.NetworkAttachmentAggregatedList( + items={ + 'h':compute.NetworkAttachmentsScopedList(), + 'i':compute.NetworkAttachmentsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NetworkAttachmentAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.NetworkAttachmentsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.NetworkAttachmentsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.NetworkAttachmentsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + 
compute.DeleteNetworkAttachmentRequest, + dict, +]) +def test_delete_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["network_attachment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["networkAttachment"] = 'network_attachment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkAttachment" in jsonified_request + assert jsonified_request["networkAttachment"] == 'network_attachment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkAttachment", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkAttachmentRequest.pb(compute.DeleteNetworkAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkAttachmentRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_attachment='network_attachment_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteNetworkAttachmentRequest(), + project='project_value', + region='region_value', + network_attachment='network_attachment_value', + ) + + +def test_delete_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNetworkAttachmentRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["network_attachment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkAttachment"] = 'network_attachment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkAttachment" in jsonified_request + assert jsonified_request["networkAttachment"] == 'network_attachment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkAttachment", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkAttachmentRequest.pb(compute.DeleteNetworkAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkAttachmentRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_attachment='network_attachment_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteNetworkAttachmentRequest(), + project='project_value', + region='region_value', + network_attachment='network_attachment_value', + ) + + +def test_delete_unary_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetNetworkAttachmentRequest, + dict, +]) +def test_get_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkAttachment( + connection_preference='connection_preference_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + network='network_value', + producer_accept_lists=['producer_accept_lists_value'], + producer_reject_lists=['producer_reject_lists_value'], + region='region_value', + self_link='self_link_value', + self_link_with_id='self_link_with_id_value', + subnetworks=['subnetworks_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.NetworkAttachment) + assert response.connection_preference == 'connection_preference_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.producer_accept_lists == ['producer_accept_lists_value'] + assert response.producer_reject_lists == ['producer_reject_lists_value'] + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.self_link_with_id == 'self_link_with_id_value' + assert response.subnetworks == ['subnetworks_value'] + + +def test_get_rest_required_fields(request_type=compute.GetNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + 
request_init["network_attachment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkAttachment"] = 'network_attachment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkAttachment" in jsonified_request + assert jsonified_request["networkAttachment"] == 'network_attachment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkAttachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("networkAttachment", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetNetworkAttachmentRequest.pb(compute.GetNetworkAttachmentRequest()) + transcode.return_value = 
{ + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkAttachment.to_json(compute.NetworkAttachment()) + + request = compute.GetNetworkAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkAttachment() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetNetworkAttachmentRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkAttachment() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_attachment='network_attachment_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments/{network_attachment}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetNetworkAttachmentRequest(), + project='project_value', + region='region_value', + network_attachment='network_attachment_value', + ) + + +def test_get_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyNetworkAttachmentRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "region", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyNetworkAttachmentRequest.pb(compute.GetIamPolicyNetworkAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyNetworkAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyNetworkAttachmentRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyNetworkAttachmentRequest(), + project='project_value', + region='region_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkAttachmentRequest, + dict, +]) +def test_insert_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["network_attachment_resource"] = {'connection_endpoints': [{'ip_address': 'ip_address_value', 'project_id_or_num': 'project_id_or_num_value', 'secondary_ip_cidr_ranges': ['secondary_ip_cidr_ranges_value1', 'secondary_ip_cidr_ranges_value2'], 'status': 'status_value', 'subnetwork': 'subnetwork_value'}], 'connection_preference': 'connection_preference_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'producer_accept_lists': ['producer_accept_lists_value1', 'producer_accept_lists_value2'], 'producer_reject_lists': ['producer_reject_lists_value1', 'producer_reject_lists_value2'], 'region': 'region_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkAttachmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkAttachmentRequest.pb(compute.InsertNetworkAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkAttachmentRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["network_attachment_resource"] = {'connection_endpoints': [{'ip_address': 'ip_address_value', 'project_id_or_num': 'project_id_or_num_value', 'secondary_ip_cidr_ranges': ['secondary_ip_cidr_ranges_value1', 'secondary_ip_cidr_ranges_value2'], 'status': 'status_value', 'subnetwork': 'subnetwork_value'}], 'connection_preference': 'connection_preference_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'producer_accept_lists': ['producer_accept_lists_value1', 'producer_accept_lists_value2'], 'producer_reject_lists': ['producer_reject_lists_value1', 'producer_reject_lists_value2'], 'region': 'region_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_attachment_resource=compute.NetworkAttachment(connection_endpoints=[compute.NetworkAttachmentConnectedEndpoint(ip_address='ip_address_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertNetworkAttachmentRequest(), + project='project_value', + region='region_value', + network_attachment_resource=compute.NetworkAttachment(connection_endpoints=[compute.NetworkAttachmentConnectedEndpoint(ip_address='ip_address_value')]), + ) + + +def test_insert_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkAttachmentRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["network_attachment_resource"] = {'connection_endpoints': [{'ip_address': 'ip_address_value', 'project_id_or_num': 'project_id_or_num_value', 'secondary_ip_cidr_ranges': ['secondary_ip_cidr_ranges_value1', 'secondary_ip_cidr_ranges_value2'], 'status': 'status_value', 'subnetwork': 'subnetwork_value'}], 'connection_preference': 'connection_preference_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'producer_accept_lists': ['producer_accept_lists_value1', 'producer_accept_lists_value2'], 
'producer_reject_lists': ['producer_reject_lists_value1', 'producer_reject_lists_value2'], 'region': 'region_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkAttachmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkAttachmentRequest.pb(compute.InsertNetworkAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkAttachmentRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["network_attachment_resource"] = {'connection_endpoints': [{'ip_address': 'ip_address_value', 'project_id_or_num': 'project_id_or_num_value', 'secondary_ip_cidr_ranges': ['secondary_ip_cidr_ranges_value1', 'secondary_ip_cidr_ranges_value2'], 'status': 'status_value', 'subnetwork': 'subnetwork_value'}], 'connection_preference': 'connection_preference_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'producer_accept_lists': ['producer_accept_lists_value1', 
'producer_accept_lists_value2'], 'producer_reject_lists': ['producer_reject_lists_value1', 'producer_reject_lists_value2'], 'region': 'region_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_attachment_resource=compute.NetworkAttachment(connection_endpoints=[compute.NetworkAttachmentConnectedEndpoint(ip_address='ip_address_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertNetworkAttachmentRequest(), + project='project_value', + region='region_value', + network_attachment_resource=compute.NetworkAttachment(connection_endpoints=[compute.NetworkAttachmentConnectedEndpoint(ip_address='ip_address_value')]), + ) + + +def test_insert_unary_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListNetworkAttachmentsRequest, + dict, +]) +def test_list_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkAttachmentList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListNetworkAttachmentsRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkAttachmentList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ 
+ mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListNetworkAttachmentsRequest.pb(compute.ListNetworkAttachmentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkAttachmentList.to_json(compute.NetworkAttachmentList()) + + request = compute.ListNetworkAttachmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkAttachmentList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListNetworkAttachmentsRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkAttachmentList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkAttachmentList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListNetworkAttachmentsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkAttachmentList( + items=[ + compute.NetworkAttachment(), + compute.NetworkAttachment(), + compute.NetworkAttachment(), + ], + next_page_token='abc', + ), + compute.NetworkAttachmentList( + items=[], + next_page_token='def', + ), + compute.NetworkAttachmentList( + items=[ + compute.NetworkAttachment(), + ], + next_page_token='ghi', + ), + compute.NetworkAttachmentList( + items=[ + compute.NetworkAttachment(), + compute.NetworkAttachment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NetworkAttachmentList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.NetworkAttachment) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", 
[ + compute.SetIamPolicyNetworkAttachmentRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify 
required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "regionSetPolicyRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = NetworkAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyNetworkAttachmentRequest.pb(compute.SetIamPolicyNetworkAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", 
+ "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyNetworkAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyNetworkAttachmentRequest): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': 
{'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyNetworkAttachmentRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsNetworkAttachmentRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsNetworkAttachmentRequest): + transport_class = transports.NetworkAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.NetworkAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.NetworkAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkAttachmentsRestInterceptor(), + ) + client = 
NetworkAttachmentsClient(transport=transport)
    # Patch the HTTP session, transcoding, and both interceptor hooks so we
    # can verify the pre/post interceptors fire exactly once around the RPC.
    with mock.patch.object(type(client.transport._session), "request") as req, \
            mock.patch.object(path_template, "transcode") as transcode, \
            mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "post_test_iam_permissions") as post, \
            mock.patch.object(transports.NetworkAttachmentsRestInterceptor, "pre_test_iam_permissions") as pre:
        # Neither hook may fire before the method is invoked.
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.TestIamPermissionsNetworkAttachmentRequest.pb(compute.TestIamPermissionsNetworkAttachmentRequest())
        # Short-circuit transcoding so the mocked session sees a fixed URI/body.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP response carrying a serialized proto payload.
        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse())

        request = compute.TestIamPermissionsNetworkAttachmentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.TestPermissionsResponse()

        client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        # Each interceptor hook fires exactly once around the call.
        pre.assert_called_once()
        post.assert_called_once()


def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsNetworkAttachmentRequest):
    # An HTTP 400 from the server surfaces as core_exceptions.BadRequest.
    client = NetworkAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}
    request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.test_iam_permissions(request)


def test_test_iam_permissions_rest_flattened():
    # Flattened (keyword) arguments must be assembled into the right request
    # and produce a URL that matches the declared HTTP rule for this method.
    client = NetworkAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.TestPermissionsResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            resource='resource_value',
            test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.TestPermissionsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.test_iam_permissions(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkAttachments/{resource}/testIamPermissions" % client.transport._host, args[1])


def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'):
    client = NetworkAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.test_iam_permissions(
            compute.TestIamPermissionsNetworkAttachmentRequest(),
            project='project_value',
            region='region_value',
            resource='resource_value',
            test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']),
        )


def test_test_iam_permissions_rest_error():
    # Smoke test: client construction over REST succeeds.
    client = NetworkAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


def test_credentials_transport_error():
    # It is an error to provide credentials and a transport instance.
    transport = transports.NetworkAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = NetworkAttachmentsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.NetworkAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = NetworkAttachmentsClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
# NOTE(review): continuation of test_credentials_transport_error.
    transport = transports.NetworkAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = NetworkAttachmentsClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = NetworkAttachmentsClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.NetworkAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = NetworkAttachmentsClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    # A client may be instantiated with a custom transport instance.
    transport = transports.NetworkAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = NetworkAttachmentsClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.NetworkAttachmentsRestTransport,
])
def test_transport_adc(transport_class):
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    # The transport's `kind` property mirrors the name it was requested by.
    transport = NetworkAttachmentsClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_network_attachments_base_transport_error():
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.NetworkAttachmentsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_network_attachments_base_transport():
    # Instantiate the base transport.
    with mock.patch('google.cloud.compute_v1.services.network_attachments.transports.NetworkAttachmentsTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.NetworkAttachmentsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_network_attachments_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.network_attachments.transports.NetworkAttachmentsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworkAttachmentsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_network_attachments_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.network_attachments.transports.NetworkAttachmentsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworkAttachmentsTransport() + adc.assert_called_once() + + +def test_network_attachments_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NetworkAttachmentsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_network_attachments_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.NetworkAttachmentsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_attachments_host_no_port(transport_name): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_attachments_host_with_port(transport_name): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_attachments_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = NetworkAttachmentsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = NetworkAttachmentsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = NetworkAttachmentsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = NetworkAttachmentsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkAttachmentsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = NetworkAttachmentsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = NetworkAttachmentsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkAttachmentsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = NetworkAttachmentsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = NetworkAttachmentsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkAttachmentsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = NetworkAttachmentsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = NetworkAttachmentsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkAttachmentsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = NetworkAttachmentsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = NetworkAttachmentsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkAttachmentsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.NetworkAttachmentsTransport, '_prep_wrapped_messages') as prep: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.NetworkAttachmentsTransport, '_prep_wrapped_messages') as prep: + transport_class = NetworkAttachmentsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (NetworkAttachmentsClient, transports.NetworkAttachmentsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_edge_security_services.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_edge_security_services.py new file mode 100644 index 000000000..42098bad5 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_edge_security_services.py @@ -0,0 +1,3089 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.network_edge_security_services import NetworkEdgeSecurityServicesClient +from google.cloud.compute_v1.services.network_edge_security_services import pagers +from google.cloud.compute_v1.services.network_edge_security_services import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NetworkEdgeSecurityServicesClient._get_default_mtls_endpoint(None) is None + assert NetworkEdgeSecurityServicesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert NetworkEdgeSecurityServicesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert NetworkEdgeSecurityServicesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert NetworkEdgeSecurityServicesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert NetworkEdgeSecurityServicesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworkEdgeSecurityServicesClient, "rest"), +]) +def test_network_edge_security_services_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.NetworkEdgeSecurityServicesRestTransport, "rest"), +]) +def 
test_network_edge_security_services_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworkEdgeSecurityServicesClient, "rest"), +]) +def test_network_edge_security_services_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_network_edge_security_services_client_get_transport_class(): + transport = NetworkEdgeSecurityServicesClient.get_transport_class() + available_transports = [ + transports.NetworkEdgeSecurityServicesRestTransport, + ] + assert transport in available_transports + + transport = NetworkEdgeSecurityServicesClient.get_transport_class("rest") + assert transport == 
transports.NetworkEdgeSecurityServicesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworkEdgeSecurityServicesClient, transports.NetworkEdgeSecurityServicesRestTransport, "rest"), +]) +@mock.patch.object(NetworkEdgeSecurityServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkEdgeSecurityServicesClient)) +def test_network_edge_security_services_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NetworkEdgeSecurityServicesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NetworkEdgeSecurityServicesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NetworkEdgeSecurityServicesClient, transports.NetworkEdgeSecurityServicesRestTransport, "rest", "true"), + (NetworkEdgeSecurityServicesClient, transports.NetworkEdgeSecurityServicesRestTransport, "rest", "false"), +]) +@mock.patch.object(NetworkEdgeSecurityServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkEdgeSecurityServicesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_network_edge_security_services_client_mtls_env_auto(client_class, 
transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + NetworkEdgeSecurityServicesClient +]) +@mock.patch.object(NetworkEdgeSecurityServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkEdgeSecurityServicesClient)) +def test_network_edge_security_services_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworkEdgeSecurityServicesClient, transports.NetworkEdgeSecurityServicesRestTransport, "rest"), +]) +def test_network_edge_security_services_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (NetworkEdgeSecurityServicesClient, transports.NetworkEdgeSecurityServicesRestTransport, "rest", None), +]) +def test_network_edge_security_services_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListNetworkEdgeSecurityServicesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEdgeSecurityServiceAggregatedList( + etag='etag_value', + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListPager) + assert response.etag == 'etag_value' + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListNetworkEdgeSecurityServicesRequest): + transport_class = transports.NetworkEdgeSecurityServicesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkEdgeSecurityServiceAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.NetworkEdgeSecurityServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEdgeSecurityServicesRestInterceptor(), + ) + client = 
NetworkEdgeSecurityServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListNetworkEdgeSecurityServicesRequest.pb(compute.AggregatedListNetworkEdgeSecurityServicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkEdgeSecurityServiceAggregatedList.to_json(compute.NetworkEdgeSecurityServiceAggregatedList()) + + request = compute.AggregatedListNetworkEdgeSecurityServicesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkEdgeSecurityServiceAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListNetworkEdgeSecurityServicesRequest): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEdgeSecurityServiceAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEdgeSecurityServiceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/networkEdgeSecurityServices" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListNetworkEdgeSecurityServicesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkEdgeSecurityServiceAggregatedList( + items={ + 'a':compute.NetworkEdgeSecurityServicesScopedList(), + 'b':compute.NetworkEdgeSecurityServicesScopedList(), + 'c':compute.NetworkEdgeSecurityServicesScopedList(), + }, + next_page_token='abc', + ), + compute.NetworkEdgeSecurityServiceAggregatedList( + items={}, + next_page_token='def', + ), + compute.NetworkEdgeSecurityServiceAggregatedList( + items={ + 'g':compute.NetworkEdgeSecurityServicesScopedList(), + }, + next_page_token='ghi', + ), + compute.NetworkEdgeSecurityServiceAggregatedList( + items={ + 'h':compute.NetworkEdgeSecurityServicesScopedList(), + 'i':compute.NetworkEdgeSecurityServicesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NetworkEdgeSecurityServiceAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.NetworkEdgeSecurityServicesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.NetworkEdgeSecurityServicesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.NetworkEdgeSecurityServicesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): 
+ assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNetworkEdgeSecurityServiceRequest, + dict, +]) +def test_delete_rest(request_type): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteNetworkEdgeSecurityServiceRequest): + transport_class = transports.NetworkEdgeSecurityServicesRestTransport + + request_init = {} + request_init["network_edge_security_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now 
present + + jsonified_request["networkEdgeSecurityService"] = 'network_edge_security_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEdgeSecurityService" in jsonified_request + assert jsonified_request["networkEdgeSecurityService"] == 'network_edge_security_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.NetworkEdgeSecurityServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEdgeSecurityService", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEdgeSecurityServicesRestInterceptor(), + ) + client = NetworkEdgeSecurityServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkEdgeSecurityServiceRequest.pb(compute.DeleteNetworkEdgeSecurityServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkEdgeSecurityServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkEdgeSecurityServiceRequest): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices/{network_edge_security_service}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteNetworkEdgeSecurityServiceRequest(), + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + ) + + +def test_delete_rest_error(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNetworkEdgeSecurityServiceRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteNetworkEdgeSecurityServiceRequest): + transport_class = transports.NetworkEdgeSecurityServicesRestTransport + + request_init = {} + request_init["network_edge_security_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEdgeSecurityService"] = 'network_edge_security_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEdgeSecurityService" in jsonified_request + assert jsonified_request["networkEdgeSecurityService"] == 'network_edge_security_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.NetworkEdgeSecurityServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEdgeSecurityService", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEdgeSecurityServicesRestInterceptor(), + ) + client = NetworkEdgeSecurityServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkEdgeSecurityServiceRequest.pb(compute.DeleteNetworkEdgeSecurityServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkEdgeSecurityServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkEdgeSecurityServiceRequest): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices/{network_edge_security_service}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteNetworkEdgeSecurityServiceRequest(), + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + ) + + +def test_delete_unary_rest_error(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetNetworkEdgeSecurityServiceRequest, + dict, +]) +def test_get_rest(request_type): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEdgeSecurityService( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + security_policy='security_policy_value', + self_link='self_link_value', + self_link_with_id='self_link_with_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEdgeSecurityService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.NetworkEdgeSecurityService) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.security_policy == 'security_policy_value' + assert response.self_link == 'self_link_value' + assert response.self_link_with_id == 'self_link_with_id_value' + + +def test_get_rest_required_fields(request_type=compute.GetNetworkEdgeSecurityServiceRequest): + transport_class = transports.NetworkEdgeSecurityServicesRestTransport + + request_init = {} + request_init["network_edge_security_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEdgeSecurityService"] = 'network_edge_security_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEdgeSecurityService" in jsonified_request + assert jsonified_request["networkEdgeSecurityService"] == 'network_edge_security_service_value' + assert 
"project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEdgeSecurityService() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkEdgeSecurityService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.NetworkEdgeSecurityServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("networkEdgeSecurityService", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_get_rest_interceptors(null_interceptor): + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEdgeSecurityServicesRestInterceptor(), + ) + client = NetworkEdgeSecurityServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetNetworkEdgeSecurityServiceRequest.pb(compute.GetNetworkEdgeSecurityServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkEdgeSecurityService.to_json(compute.NetworkEdgeSecurityService()) + + request = compute.GetNetworkEdgeSecurityServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkEdgeSecurityService() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetNetworkEdgeSecurityServiceRequest): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEdgeSecurityService() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEdgeSecurityService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices/{network_edge_security_service}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetNetworkEdgeSecurityServiceRequest(), + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + ) + + +def test_get_rest_error(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkEdgeSecurityServiceRequest, + dict, +]) +def test_insert_rest(request_type): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["network_edge_security_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertNetworkEdgeSecurityServiceRequest): + transport_class = transports.NetworkEdgeSecurityServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.NetworkEdgeSecurityServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("networkEdgeSecurityServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEdgeSecurityServicesRestInterceptor(), + ) + client = NetworkEdgeSecurityServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkEdgeSecurityServiceRequest.pb(compute.InsertNetworkEdgeSecurityServiceRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkEdgeSecurityServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkEdgeSecurityServiceRequest): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["network_edge_security_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_edge_security_service_resource=compute.NetworkEdgeSecurityService(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertNetworkEdgeSecurityServiceRequest(), + project='project_value', + region='region_value', + network_edge_security_service_resource=compute.NetworkEdgeSecurityService(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkEdgeSecurityServiceRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["network_edge_security_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertNetworkEdgeSecurityServiceRequest): + transport_class = transports.NetworkEdgeSecurityServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.NetworkEdgeSecurityServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("networkEdgeSecurityServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEdgeSecurityServicesRestInterceptor(), + ) + client = NetworkEdgeSecurityServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkEdgeSecurityServiceRequest.pb(compute.InsertNetworkEdgeSecurityServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkEdgeSecurityServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkEdgeSecurityServiceRequest): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["network_edge_security_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_edge_security_service_resource=compute.NetworkEdgeSecurityService(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertNetworkEdgeSecurityServiceRequest(), + project='project_value', + region='region_value', + network_edge_security_service_resource=compute.NetworkEdgeSecurityService(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchNetworkEdgeSecurityServiceRequest, + dict, +]) +def test_patch_rest(request_type): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request_init["network_edge_security_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchNetworkEdgeSecurityServiceRequest): + transport_class = transports.NetworkEdgeSecurityServicesRestTransport + + request_init = {} + request_init["network_edge_security_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now 
present + + jsonified_request["networkEdgeSecurityService"] = 'network_edge_security_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEdgeSecurityService" in jsonified_request + assert jsonified_request["networkEdgeSecurityService"] == 'network_edge_security_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.NetworkEdgeSecurityServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("networkEdgeSecurityService", "networkEdgeSecurityServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEdgeSecurityServicesRestInterceptor(), + ) + client = NetworkEdgeSecurityServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNetworkEdgeSecurityServiceRequest.pb(compute.PatchNetworkEdgeSecurityServiceRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNetworkEdgeSecurityServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchNetworkEdgeSecurityServiceRequest): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request_init["network_edge_security_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + network_edge_security_service_resource=compute.NetworkEdgeSecurityService(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices/{network_edge_security_service}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchNetworkEdgeSecurityServiceRequest(), + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + network_edge_security_service_resource=compute.NetworkEdgeSecurityService(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchNetworkEdgeSecurityServiceRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request_init["network_edge_security_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchNetworkEdgeSecurityServiceRequest): + transport_class = transports.NetworkEdgeSecurityServicesRestTransport + + request_init = {} + request_init["network_edge_security_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEdgeSecurityService"] = 'network_edge_security_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEdgeSecurityService" in jsonified_request + assert jsonified_request["networkEdgeSecurityService"] == 'network_edge_security_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.NetworkEdgeSecurityServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("networkEdgeSecurityService", "networkEdgeSecurityServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEdgeSecurityServicesRestInterceptor(), + ) + client = NetworkEdgeSecurityServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.PatchNetworkEdgeSecurityServiceRequest.pb(compute.PatchNetworkEdgeSecurityServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNetworkEdgeSecurityServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchNetworkEdgeSecurityServiceRequest): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + request_init["network_edge_security_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_edge_security_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + network_edge_security_service_resource=compute.NetworkEdgeSecurityService(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEdgeSecurityServices/{network_edge_security_service}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchNetworkEdgeSecurityServiceRequest(), + project='project_value', + region='region_value', + network_edge_security_service='network_edge_security_service_value', + network_edge_security_service_resource=compute.NetworkEdgeSecurityService(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkEdgeSecurityServicesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworkEdgeSecurityServicesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworkEdgeSecurityServicesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkEdgeSecurityServicesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NetworkEdgeSecurityServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NetworkEdgeSecurityServicesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.NetworkEdgeSecurityServicesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = NetworkEdgeSecurityServicesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_network_edge_security_services_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.NetworkEdgeSecurityServicesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_network_edge_security_services_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.network_edge_security_services.transports.NetworkEdgeSecurityServicesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.NetworkEdgeSecurityServicesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'patch', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_network_edge_security_services_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.network_edge_security_services.transports.NetworkEdgeSecurityServicesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworkEdgeSecurityServicesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_network_edge_security_services_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.network_edge_security_services.transports.NetworkEdgeSecurityServicesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworkEdgeSecurityServicesTransport() + adc.assert_called_once() + + +def test_network_edge_security_services_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NetworkEdgeSecurityServicesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_network_edge_security_services_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.NetworkEdgeSecurityServicesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_edge_security_services_host_no_port(transport_name): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_network_edge_security_services_host_with_port(transport_name): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_edge_security_services_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = NetworkEdgeSecurityServicesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = NetworkEdgeSecurityServicesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = NetworkEdgeSecurityServicesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = 
NetworkEdgeSecurityServicesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkEdgeSecurityServicesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = NetworkEdgeSecurityServicesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = NetworkEdgeSecurityServicesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkEdgeSecurityServicesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = NetworkEdgeSecurityServicesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = NetworkEdgeSecurityServicesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkEdgeSecurityServicesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = NetworkEdgeSecurityServicesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = NetworkEdgeSecurityServicesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkEdgeSecurityServicesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = NetworkEdgeSecurityServicesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = NetworkEdgeSecurityServicesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkEdgeSecurityServicesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.NetworkEdgeSecurityServicesTransport, '_prep_wrapped_messages') as prep: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.NetworkEdgeSecurityServicesTransport, '_prep_wrapped_messages') as prep: + transport_class = NetworkEdgeSecurityServicesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = 
NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (NetworkEdgeSecurityServicesClient, transports.NetworkEdgeSecurityServicesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py new file mode 100644 index 000000000..eee914aec --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py @@ -0,0 +1,4536 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.network_endpoint_groups import NetworkEndpointGroupsClient +from google.cloud.compute_v1.services.network_endpoint_groups import pagers +from google.cloud.compute_v1.services.network_endpoint_groups import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key 
bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NetworkEndpointGroupsClient._get_default_mtls_endpoint(None) is None + assert NetworkEndpointGroupsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert NetworkEndpointGroupsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert NetworkEndpointGroupsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert NetworkEndpointGroupsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert NetworkEndpointGroupsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworkEndpointGroupsClient, "rest"), +]) +def test_network_endpoint_groups_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + 
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.NetworkEndpointGroupsRestTransport, "rest"), +]) +def test_network_endpoint_groups_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworkEndpointGroupsClient, "rest"), +]) +def test_network_endpoint_groups_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_network_endpoint_groups_client_get_transport_class(): + transport = NetworkEndpointGroupsClient.get_transport_class() + available_transports = [ + transports.NetworkEndpointGroupsRestTransport, + ] + assert transport in available_transports + + transport = 
NetworkEndpointGroupsClient.get_transport_class("rest") + assert transport == transports.NetworkEndpointGroupsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworkEndpointGroupsClient, transports.NetworkEndpointGroupsRestTransport, "rest"), +]) +@mock.patch.object(NetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkEndpointGroupsClient)) +def test_network_endpoint_groups_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NetworkEndpointGroupsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NetworkEndpointGroupsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NetworkEndpointGroupsClient, transports.NetworkEndpointGroupsRestTransport, "rest", "true"), + (NetworkEndpointGroupsClient, transports.NetworkEndpointGroupsRestTransport, "rest", "false"), +]) +@mock.patch.object(NetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkEndpointGroupsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_network_endpoint_groups_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): 
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + NetworkEndpointGroupsClient +]) +@mock.patch.object(NetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkEndpointGroupsClient)) +def test_network_endpoint_groups_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworkEndpointGroupsClient, transports.NetworkEndpointGroupsRestTransport, "rest"), +]) +def test_network_endpoint_groups_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (NetworkEndpointGroupsClient, transports.NetworkEndpointGroupsRestTransport, "rest", None), +]) +def test_network_endpoint_groups_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListNetworkEndpointGroupsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroupAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListNetworkEndpointGroupsRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroupAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkEndpointGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListNetworkEndpointGroupsRequest.pb(compute.AggregatedListNetworkEndpointGroupsRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkEndpointGroupAggregatedList.to_json(compute.NetworkEndpointGroupAggregatedList()) + + request = compute.AggregatedListNetworkEndpointGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkEndpointGroupAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListNetworkEndpointGroupsRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkEndpointGroupAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroupAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/networkEndpointGroups" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListNetworkEndpointGroupsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkEndpointGroupAggregatedList( + items={ + 'a':compute.NetworkEndpointGroupsScopedList(), + 'b':compute.NetworkEndpointGroupsScopedList(), + 'c':compute.NetworkEndpointGroupsScopedList(), + }, + next_page_token='abc', + ), + compute.NetworkEndpointGroupAggregatedList( + items={}, + next_page_token='def', + ), + compute.NetworkEndpointGroupAggregatedList( + items={ + 'g':compute.NetworkEndpointGroupsScopedList(), + }, + next_page_token='ghi', + ), + compute.NetworkEndpointGroupAggregatedList( + items={ + 'h':compute.NetworkEndpointGroupsScopedList(), + 'i':compute.NetworkEndpointGroupsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NetworkEndpointGroupAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.NetworkEndpointGroupsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.NetworkEndpointGroupsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.NetworkEndpointGroupsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize("request_type", [ + compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, + dict, +]) +def test_attach_network_endpoints_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_attach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.attach_network_endpoints(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_attach_network_endpoints_rest_required_fields(request_type=compute.AttachNetworkEndpointsNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.attach_network_endpoints(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_attach_network_endpoints_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.attach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "networkEndpointGroupsAttachEndpointsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_attach_network_endpoints_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_attach_network_endpoints") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_attach_network_endpoints") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.pb(compute.AttachNetworkEndpointsNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.attach_network_endpoints(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_attach_network_endpoints_rest_bad_request(transport: str = 'rest', request_type=compute.AttachNetworkEndpointsNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_attach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_network_endpoints(request) + + +def test_attach_network_endpoints_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_attach_endpoints_request_resource=compute.NetworkEndpointGroupsAttachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.attach_network_endpoints(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints" % client.transport._host, args[1]) + + +def test_attach_network_endpoints_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.attach_network_endpoints( + compute.AttachNetworkEndpointsNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_attach_endpoints_request_resource=compute.NetworkEndpointGroupsAttachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + + +def test_attach_network_endpoints_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, + dict, +]) +def test_attach_network_endpoints_unary_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_attach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.attach_network_endpoints_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_attach_network_endpoints_unary_rest_required_fields(request_type=compute.AttachNetworkEndpointsNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).attach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.attach_network_endpoints_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_attach_network_endpoints_unary_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.attach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "networkEndpointGroupsAttachEndpointsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_attach_network_endpoints_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_attach_network_endpoints") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_attach_network_endpoints") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.pb(compute.AttachNetworkEndpointsNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.attach_network_endpoints_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_attach_network_endpoints_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AttachNetworkEndpointsNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_attach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_network_endpoints_unary(request) + + +def test_attach_network_endpoints_unary_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_attach_endpoints_request_resource=compute.NetworkEndpointGroupsAttachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.attach_network_endpoints_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints" % client.transport._host, args[1]) + + +def test_attach_network_endpoints_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.attach_network_endpoints_unary( + compute.AttachNetworkEndpointsNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_attach_endpoints_request_resource=compute.NetworkEndpointGroupsAttachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + + +def test_attach_network_endpoints_unary_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNetworkEndpointGroupRequest, + dict, +]) +def test_delete_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkEndpointGroupRequest.pb(compute.DeleteNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + ) + + +def test_delete_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNetworkEndpointGroupRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkEndpointGroupRequest.pb(compute.DeleteNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + ) + + +def test_delete_unary_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, + dict, +]) +def test_detach_network_endpoints_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_detach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.detach_network_endpoints(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_detach_network_endpoints_rest_required_fields(request_type=compute.DetachNetworkEndpointsNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.detach_network_endpoints(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_detach_network_endpoints_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.detach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "networkEndpointGroupsDetachEndpointsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_network_endpoints_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_detach_network_endpoints") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_detach_network_endpoints") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.pb(compute.DetachNetworkEndpointsNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.detach_network_endpoints(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_network_endpoints_rest_bad_request(transport: str = 'rest', request_type=compute.DetachNetworkEndpointsNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_detach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_network_endpoints(request) + + +def test_detach_network_endpoints_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_detach_endpoints_request_resource=compute.NetworkEndpointGroupsDetachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.detach_network_endpoints(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints" % client.transport._host, args[1]) + + +def test_detach_network_endpoints_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.detach_network_endpoints( + compute.DetachNetworkEndpointsNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_detach_endpoints_request_resource=compute.NetworkEndpointGroupsDetachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + + +def test_detach_network_endpoints_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, + dict, +]) +def test_detach_network_endpoints_unary_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_detach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.detach_network_endpoints_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_detach_network_endpoints_unary_rest_required_fields(request_type=compute.DetachNetworkEndpointsNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.detach_network_endpoints_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_detach_network_endpoints_unary_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.detach_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "networkEndpointGroupsDetachEndpointsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_network_endpoints_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_detach_network_endpoints") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_detach_network_endpoints") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.pb(compute.DetachNetworkEndpointsNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.detach_network_endpoints_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_network_endpoints_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DetachNetworkEndpointsNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_detach_endpoints_request_resource"] = {'network_endpoints': [{'annotations': {}, 'fqdn': 'fqdn_value', 'instance': 'instance_value', 'ip_address': 'ip_address_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_network_endpoints_unary(request) + + +def test_detach_network_endpoints_unary_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_detach_endpoints_request_resource=compute.NetworkEndpointGroupsDetachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.detach_network_endpoints_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints" % client.transport._host, args[1]) + + +def test_detach_network_endpoints_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.detach_network_endpoints_unary( + compute.DetachNetworkEndpointsNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_detach_endpoints_request_resource=compute.NetworkEndpointGroupsDetachEndpointsRequest(network_endpoints=[compute.NetworkEndpoint(annotations={'key_value': 'value_value'})]), + ) + + +def test_detach_network_endpoints_unary_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetNetworkEndpointGroupRequest, + dict, +]) +def test_get_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkEndpointGroup( + creation_timestamp='creation_timestamp_value', + default_port=1289, + description='description_value', + id=205, + kind='kind_value', + name='name_value', + network='network_value', + network_endpoint_type='network_endpoint_type_value', + psc_target_service='psc_target_service_value', + region='region_value', + self_link='self_link_value', + size=443, + subnetwork='subnetwork_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.NetworkEndpointGroup) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.default_port == 1289 + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.network_endpoint_type == 'network_endpoint_type_value' + assert response.psc_target_service == 'psc_target_service_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.size == 443 + assert response.subnetwork == 'subnetwork_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + 
jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("networkEndpointGroup", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetNetworkEndpointGroupRequest.pb(compute.GetNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkEndpointGroup.to_json(compute.NetworkEndpointGroup()) + + request = compute.GetNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkEndpointGroup() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkEndpointGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + ) + + +def test_get_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkEndpointGroupRequest, + dict, +]) +def test_insert_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroupResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkEndpointGroupRequest.pb(compute.InsertNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}), + ) + + +def test_insert_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkEndpointGroupRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 
'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroupResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkEndpointGroupRequest.pb(compute.InsertNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 
'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}), + ) + + +def test_insert_unary_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListNetworkEndpointGroupsRequest, + dict, +]) +def test_list_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkEndpointGroupList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListNetworkEndpointGroupsRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroupList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListNetworkEndpointGroupsRequest.pb(compute.ListNetworkEndpointGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkEndpointGroupList.to_json(compute.NetworkEndpointGroupList()) + + request = compute.ListNetworkEndpointGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkEndpointGroupList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListNetworkEndpointGroupsRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NetworkEndpointGroupList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListNetworkEndpointGroupsRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkEndpointGroupList( + items=[ + compute.NetworkEndpointGroup(), + compute.NetworkEndpointGroup(), + compute.NetworkEndpointGroup(), + ], + next_page_token='abc', + ), + compute.NetworkEndpointGroupList( + items=[], + next_page_token='def', + ), + compute.NetworkEndpointGroupList( + items=[ + compute.NetworkEndpointGroup(), + ], + next_page_token='ghi', + ), + compute.NetworkEndpointGroupList( + items=[ + compute.NetworkEndpointGroup(), + compute.NetworkEndpointGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NetworkEndpointGroupList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.NetworkEndpointGroup) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, + dict, +]) +def test_list_network_endpoints_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_list_endpoints_request_resource"] = {'health_status': 
'health_status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_network_endpoints(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNetworkEndpointsPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + + +def test_list_network_endpoints_rest_required_fields(request_type=compute.ListNetworkEndpointsNetworkEndpointGroupsRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_network_endpoints._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_network_endpoints._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_network_endpoints(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_network_endpoints_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_network_endpoints._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("networkEndpointGroup", "networkEndpointGroupsListEndpointsRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_network_endpoints_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_list_network_endpoints") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_list_network_endpoints") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.pb(compute.ListNetworkEndpointsNetworkEndpointGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkEndpointGroupsListNetworkEndpoints.to_json(compute.NetworkEndpointGroupsListNetworkEndpoints()) + + request = compute.ListNetworkEndpointsNetworkEndpointGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() + + client.list_network_endpoints(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_network_endpoints_rest_bad_request(transport: str = 'rest', request_type=compute.ListNetworkEndpointsNetworkEndpointGroupsRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + request_init["network_endpoint_groups_list_endpoints_request_resource"] = {'health_status': 'health_status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_network_endpoints(request) + + +def test_list_network_endpoints_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_list_endpoints_request_resource=compute.NetworkEndpointGroupsListEndpointsRequest(health_status='health_status_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_network_endpoints(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints" % client.transport._host, args[1]) + + +def test_list_network_endpoints_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_network_endpoints( + compute.ListNetworkEndpointsNetworkEndpointGroupsRequest(), + project='project_value', + zone='zone_value', + network_endpoint_group='network_endpoint_group_value', + network_endpoint_groups_list_endpoints_request_resource=compute.NetworkEndpointGroupsListEndpointsRequest(health_status='health_status_value'), + ) + + +def test_list_network_endpoints_rest_pager(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkEndpointGroupsListNetworkEndpoints( + items=[ + compute.NetworkEndpointWithHealthStatus(), + compute.NetworkEndpointWithHealthStatus(), + compute.NetworkEndpointWithHealthStatus(), + ], + next_page_token='abc', + ), + compute.NetworkEndpointGroupsListNetworkEndpoints( + items=[], + next_page_token='def', + ), + compute.NetworkEndpointGroupsListNetworkEndpoints( + items=[ + compute.NetworkEndpointWithHealthStatus(), + ], + next_page_token='ghi', + ), + compute.NetworkEndpointGroupsListNetworkEndpoints( + items=[ + compute.NetworkEndpointWithHealthStatus(), + compute.NetworkEndpointWithHealthStatus(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NetworkEndpointGroupsListNetworkEndpoints.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2', 'network_endpoint_group': 'sample3'} + sample_request["network_endpoint_groups_list_endpoints_request_resource"] = compute.NetworkEndpointGroupsListEndpointsRequest(health_status='health_status_value') + + pager = client.list_network_endpoints(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.NetworkEndpointWithHealthStatus) + for i in results) + + pages = list(client.list_network_endpoints(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsNetworkEndpointGroupRequest, + dict, +]) +def 
test_test_iam_permissions_rest(request_type): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsNetworkEndpointGroupRequest): + transport_class = transports.NetworkEndpointGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.NetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkEndpointGroupsRestInterceptor(), + ) + client = 
NetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.NetworkEndpointGroupsRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsNetworkEndpointGroupRequest.pb(compute.TestIamPermissionsNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsNetworkEndpointGroupRequest): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsNetworkEndpointGroupRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkEndpointGroupsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworkEndpointGroupsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworkEndpointGroupsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkEndpointGroupsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NetworkEndpointGroupsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.NetworkEndpointGroupsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + # The transport's `kind` property must echo the transport name it was + # constructed from (here only "rest" is generated for this service). + transport = NetworkEndpointGroupsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_network_endpoint_groups_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.NetworkEndpointGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_network_endpoint_groups_base_transport(): + # Instantiate the base transport. + # __init__ is patched out so the abstract base can be constructed without + # real auth; only the method stubs are exercised below. + with mock.patch('google.cloud.compute_v1.services.network_endpoint_groups.transports.NetworkEndpointGroupsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.NetworkEndpointGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'attach_network_endpoints', + 'delete', + 'detach_network_endpoints', + 'get', + 'insert', + 'list', + 'list_network_endpoints', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_network_endpoint_groups_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.network_endpoint_groups.transports.NetworkEndpointGroupsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworkEndpointGroupsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_network_endpoint_groups_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.network_endpoint_groups.transports.NetworkEndpointGroupsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworkEndpointGroupsTransport() + adc.assert_called_once() + + +def test_network_endpoint_groups_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NetworkEndpointGroupsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_network_endpoint_groups_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.NetworkEndpointGroupsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_endpoint_groups_host_no_port(transport_name): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_endpoint_groups_host_with_port(transport_name): + client = 
NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_endpoint_groups_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = NetworkEndpointGroupsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = NetworkEndpointGroupsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.attach_network_endpoints._session + session2 = client2.transport.attach_network_endpoints._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.detach_network_endpoints._session + session2 = client2.transport.detach_network_endpoints._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_network_endpoints._session + session2 = client2.transport.list_network_endpoints._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = 
client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = NetworkEndpointGroupsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = NetworkEndpointGroupsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkEndpointGroupsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = NetworkEndpointGroupsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = NetworkEndpointGroupsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkEndpointGroupsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = NetworkEndpointGroupsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = NetworkEndpointGroupsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkEndpointGroupsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + # Path helpers are pure string templating; building then parsing must + # round-trip the original components (see parse_* test below). + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = NetworkEndpointGroupsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = NetworkEndpointGroupsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkEndpointGroupsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + # Two-component template: both project and location must be substituted. + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = NetworkEndpointGroupsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = NetworkEndpointGroupsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkEndpointGroupsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.NetworkEndpointGroupsTransport, '_prep_wrapped_messages') as prep: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.NetworkEndpointGroupsTransport, '_prep_wrapped_messages') as prep: + transport_class = NetworkEndpointGroupsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (NetworkEndpointGroupsClient, transports.NetworkEndpointGroupsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_firewall_policies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_firewall_policies.py new file mode 100644 index 000000000..5236ce360 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_network_firewall_policies.py @@ -0,0 +1,7605 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.network_firewall_policies import NetworkFirewallPoliciesClient +from google.cloud.compute_v1.services.network_firewall_policies import pagers +from google.cloud.compute_v1.services.network_firewall_policies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NetworkFirewallPoliciesClient._get_default_mtls_endpoint(None) is None + assert NetworkFirewallPoliciesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert NetworkFirewallPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert NetworkFirewallPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert NetworkFirewallPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert NetworkFirewallPoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworkFirewallPoliciesClient, "rest"), +]) +def test_network_firewall_policies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.NetworkFirewallPoliciesRestTransport, "rest"), +]) +def 
test_network_firewall_policies_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworkFirewallPoliciesClient, "rest"), +]) +def test_network_firewall_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_network_firewall_policies_client_get_transport_class(): + transport = NetworkFirewallPoliciesClient.get_transport_class() + available_transports = [ + transports.NetworkFirewallPoliciesRestTransport, + ] + assert transport in available_transports + + transport = NetworkFirewallPoliciesClient.get_transport_class("rest") + assert transport == transports.NetworkFirewallPoliciesRestTransport + + 
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworkFirewallPoliciesClient, transports.NetworkFirewallPoliciesRestTransport, "rest"), +]) +@mock.patch.object(NetworkFirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkFirewallPoliciesClient)) +def test_network_firewall_policies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NetworkFirewallPoliciesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NetworkFirewallPoliciesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NetworkFirewallPoliciesClient, transports.NetworkFirewallPoliciesRestTransport, "rest", "true"), + (NetworkFirewallPoliciesClient, transports.NetworkFirewallPoliciesRestTransport, "rest", "false"), +]) +@mock.patch.object(NetworkFirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkFirewallPoliciesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_network_firewall_policies_client_mtls_env_auto(client_class, transport_class, transport_name, 
use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + NetworkFirewallPoliciesClient +]) +@mock.patch.object(NetworkFirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworkFirewallPoliciesClient)) +def test_network_firewall_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworkFirewallPoliciesClient, transports.NetworkFirewallPoliciesRestTransport, "rest"), +]) +def test_network_firewall_policies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (NetworkFirewallPoliciesClient, transports.NetworkFirewallPoliciesRestTransport, "rest", None), +]) +def test_network_firewall_policies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddAssociationNetworkFirewallPolicyRequest, + dict, +]) +def test_add_association_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_association_rest_required_fields(request_type=compute.AddAssociationNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("replace_existing_association", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_association(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_association_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("replaceExistingAssociation", "requestId", )) & set(("firewallPolicy", "firewallPolicyAssociationResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_association_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_add_association") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_add_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.AddAssociationNetworkFirewallPolicyRequest.pb(compute.AddAssociationNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddAssociationNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_association_rest_bad_request(transport: str = 'rest', request_type=compute.AddAssociationNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_association(request) + + +def test_add_association_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/addAssociation" % client.transport._host, args[1]) + + +def test_add_association_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_association( + compute.AddAssociationNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + + +def test_add_association_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddAssociationNetworkFirewallPolicyRequest, + dict, +]) +def test_add_association_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_association_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_association_unary_rest_required_fields(request_type=compute.AddAssociationNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("replace_existing_association", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_association_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_association_unary_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("replaceExistingAssociation", "requestId", )) & set(("firewallPolicy", "firewallPolicyAssociationResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_association_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as 
req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_add_association") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_add_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddAssociationNetworkFirewallPolicyRequest.pb(compute.AddAssociationNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddAssociationNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_association_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_association_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddAssociationNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_association_unary(request) + + +def test_add_association_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_association_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/addAssociation" % client.transport._host, args[1]) + + +def test_add_association_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_association_unary( + compute.AddAssociationNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + + +def test_add_association_unary_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddRuleNetworkFirewallPolicyRequest, + dict, +]) +def test_add_rule_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 
'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_rule_rest_required_fields(request_type=compute.AddRuleNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 
'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("max_priority", "min_priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_rule_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("maxPriority", "minPriority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_rule_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_add_rule") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_add_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddRuleNetworkFirewallPolicyRequest.pb(compute.AddRuleNetworkFirewallPolicyRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddRuleNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_rule_rest_bad_request(transport: str = 'rest', request_type=compute.AddRuleNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 
'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_rule(request) + + +def test_add_rule_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/addRule" % client.transport._host, args[1]) + + +def test_add_rule_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_rule( + compute.AddRuleNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_add_rule_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddRuleNetworkFirewallPolicyRequest, + dict, +]) +def test_add_rule_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 
'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_rule_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_rule_unary_rest_required_fields(request_type=compute.AddRuleNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("max_priority", "min_priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_rule_unary_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("maxPriority", "minPriority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_rule_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_add_rule") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_add_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddRuleNetworkFirewallPolicyRequest.pb(compute.AddRuleNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddRuleNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddRuleNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': 
['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_rule_unary(request) + + +def test_add_rule_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/addRule" % client.transport._host, args[1]) + + +def test_add_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_rule_unary( + compute.AddRuleNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_add_rule_unary_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CloneRulesNetworkFirewallPolicyRequest, + dict, +]) +def test_clone_rules_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.clone_rules(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_clone_rules_rest_required_fields(request_type=compute.CloneRulesNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_firewall_policy", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.clone_rules(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_clone_rules_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.clone_rules._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceFirewallPolicy", )) & set(("firewallPolicy", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_clone_rules_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_clone_rules") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_clone_rules") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CloneRulesNetworkFirewallPolicyRequest.pb(compute.CloneRulesNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CloneRulesNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.clone_rules(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_clone_rules_rest_bad_request(transport: str = 'rest', request_type=compute.CloneRulesNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.clone_rules(request) + + +def test_clone_rules_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.clone_rules(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/cloneRules" % client.transport._host, args[1]) + + +def test_clone_rules_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.clone_rules( + compute.CloneRulesNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_clone_rules_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CloneRulesNetworkFirewallPolicyRequest, + dict, +]) +def test_clone_rules_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.clone_rules_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_clone_rules_unary_rest_required_fields(request_type=compute.CloneRulesNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_firewall_policy", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.clone_rules_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_clone_rules_unary_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.clone_rules._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceFirewallPolicy", )) & set(("firewallPolicy", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_clone_rules_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_clone_rules") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_clone_rules") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CloneRulesNetworkFirewallPolicyRequest.pb(compute.CloneRulesNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CloneRulesNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.clone_rules_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_clone_rules_unary_rest_bad_request(transport: str = 'rest', request_type=compute.CloneRulesNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.clone_rules_unary(request) + + +def test_clone_rules_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.clone_rules_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/cloneRules" % client.transport._host, args[1])


def test_clone_rules_unary_rest_flattened_error(transport: str = 'rest'):
    # A request object combined with flattened fields must be rejected.
    client = NetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.clone_rules_unary(
            compute.CloneRulesNetworkFirewallPolicyRequest(),
            project='project_value',
            firewall_policy='firewall_policy_value',
        )


def test_clone_rules_unary_rest_error():
    # Smoke test: a REST client can be constructed for this service.
    client = NetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.DeleteNetworkFirewallPolicyRequest,
    dict,
])
def test_delete_rest(request_type):
    # delete returns an extended Operation; every echoed field is asserted.
    client = NetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'firewall_policy': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_delete_rest_required_fields(request_type=compute.DeleteNetworkFirewallPolicyRequest):
    # Required fields (project, firewallPolicy) must survive transcoding;
    # requestId is delete's only optional query param.
    transport_class = transports.NetworkFirewallPoliciesRestTransport

    request_init = {}
    request_init["firewall_policy"] = ""
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["firewallPolicy"] = 'firewall_policy_value'
    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "firewallPolicy" in jsonified_request
    assert jsonified_request["firewallPolicy"] == 'firewall_policy_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = NetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkFirewallPolicyRequest.pb(compute.DeleteNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_delete_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNetworkFirewallPolicyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkFirewallPolicyRequest.pb(compute.DeleteNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_delete_unary_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetNetworkFirewallPolicyRequest, + dict, +]) +def test_get_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicy( + creation_timestamp='creation_timestamp_value', + description='description_value', + display_name='display_name_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + parent='parent_value', + region='region_value', + rule_tuple_count=1737, + self_link='self_link_value', + self_link_with_id='self_link_with_id_value', + short_name='short_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.FirewallPolicy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.parent == 'parent_value' + assert response.region == 'region_value' + assert response.rule_tuple_count == 1737 + assert response.self_link == 'self_link_value' + assert response.self_link_with_id == 'self_link_with_id_value' + assert response.short_name == 'short_name_value' + + +def test_get_rest_required_fields(request_type=compute.GetNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + 
including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("firewallPolicy", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetNetworkFirewallPolicyRequest.pb(compute.GetNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicy.to_json(compute.FirewallPolicy()) + + request = compute.GetNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_get_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetAssociationNetworkFirewallPolicyRequest, + dict, +]) +def test_get_association_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyAssociation( + attachment_target='attachment_target_value', + display_name='display_name_value', + firewall_policy_id='firewall_policy_id_value', + name='name_value', + short_name='short_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.FirewallPolicyAssociation) + assert response.attachment_target == 'attachment_target_value' + assert response.display_name == 'display_name_value' + assert response.firewall_policy_id == 'firewall_policy_id_value' + assert response.name == 'name_value' + assert response.short_name == 'short_name_value' + + +def test_get_association_rest_required_fields(request_type=compute.GetAssociationNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyAssociation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_association(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_association_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("firewallPolicy", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_association_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_get_association") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_get_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetAssociationNetworkFirewallPolicyRequest.pb(compute.GetAssociationNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicyAssociation.to_json(compute.FirewallPolicyAssociation()) + + request = compute.GetAssociationNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicyAssociation() + + client.get_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_association_rest_bad_request(transport: str = 'rest', request_type=compute.GetAssociationNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_association(request) + + +def test_get_association_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicyAssociation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/getAssociation" % client.transport._host, args[1]) + + +def test_get_association_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_association( + compute.GetAssociationNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_get_association_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyNetworkFirewallPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyNetworkFirewallPolicyRequest.pb(compute.GetIamPolicyNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyNetworkFirewallPolicyRequest(), + project='project_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRuleNetworkFirewallPolicyRequest, + dict, +]) +def test_get_rule_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyRule( + action='action_value', + description='description_value', + direction='direction_value', + disabled=True, + enable_logging=True, + kind='kind_value', + priority=898, + rule_name='rule_name_value', + rule_tuple_count=1737, + target_resources=['target_resources_value'], + target_service_accounts=['target_service_accounts_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.FirewallPolicyRule) + assert response.action == 'action_value' + assert response.description == 'description_value' + assert response.direction == 'direction_value' + assert response.disabled is True + assert response.enable_logging is True + assert response.kind == 'kind_value' + assert response.priority == 898 + assert response.rule_name == 'rule_name_value' + assert response.rule_tuple_count == 1737 + assert response.target_resources == ['target_resources_value'] + assert response.target_service_accounts == ['target_service_accounts_value'] + + +def test_get_rule_rest_required_fields(request_type=compute.GetRuleNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("priority", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rule_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", )) & set(("firewallPolicy", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rule_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_get_rule") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_get_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRuleNetworkFirewallPolicyRequest.pb(compute.GetRuleNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicyRule.to_json(compute.FirewallPolicyRule()) + + request = compute.GetRuleNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicyRule() + + client.get_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rule_rest_bad_request(transport: str = 'rest', request_type=compute.GetRuleNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rule(request) + + +def test_get_rule_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicyRule() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/getRule" % client.transport._host, args[1]) + + +def test_get_rule_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_rule( + compute.GetRuleNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_get_rule_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkFirewallPolicyRequest, + dict, +]) +def test_insert_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 
'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicyResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkFirewallPolicyRequest.pb(compute.InsertNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 
'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_insert_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkFirewallPolicyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 
'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicyResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message 
= compute.InsertNetworkFirewallPolicyRequest.pb(compute.InsertNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': 
['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_insert_unary_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListNetworkFirewallPoliciesRequest, + dict, +]) +def test_list_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + + +def test_list_rest_required_fields(request_type=compute.ListNetworkFirewallPoliciesRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListNetworkFirewallPoliciesRequest.pb(compute.ListNetworkFirewallPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicyList.to_json(compute.FirewallPolicyList()) + + request = compute.ListNetworkFirewallPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListNetworkFirewallPoliciesRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListNetworkFirewallPoliciesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.FirewallPolicyList( + items=[ + compute.FirewallPolicy(), + compute.FirewallPolicy(), + compute.FirewallPolicy(), + ], + next_page_token='abc', + ), + compute.FirewallPolicyList( + items=[], + next_page_token='def', + ), + compute.FirewallPolicyList( + items=[ + compute.FirewallPolicy(), + ], + next_page_token='ghi', + ), + compute.FirewallPolicyList( + items=[ + compute.FirewallPolicy(), + compute.FirewallPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.FirewallPolicyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.FirewallPolicy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchNetworkFirewallPolicyRequest, + dict, +]) +def 
test_patch_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 
'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 
'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "firewallPolicyResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNetworkFirewallPolicyRequest.pb(compute.PatchNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': 
['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_patch_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchNetworkFirewallPolicyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': 
[{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "firewallPolicyResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNetworkFirewallPolicyRequest.pb(compute.PatchNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 
'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_patch_unary_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRuleNetworkFirewallPolicyRequest, + dict, +]) +def test_patch_rule_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 
'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rule_rest_required_fields(request_type=compute.PatchRuleNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rule_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rule_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_patch_rule") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_patch_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRuleNetworkFirewallPolicyRequest.pb(compute.PatchRuleNetworkFirewallPolicyRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRuleNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rule_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRuleNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 
'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule(request) + + +def test_patch_rule_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/patchRule" % client.transport._host, args[1]) + + +def test_patch_rule_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_rule( + compute.PatchRuleNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_patch_rule_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRuleNetworkFirewallPolicyRequest, + dict, +]) +def test_patch_rule_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 
'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_rule_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_rule_unary_rest_required_fields(request_type=compute.PatchRuleNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rule_unary_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rule_unary_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_patch_rule") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_patch_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRuleNetworkFirewallPolicyRequest.pb(compute.PatchRuleNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRuleNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRuleNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': 
['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule_unary(request) + + +def test_patch_rule_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/patchRule" % client.transport._host, args[1]) + + +def test_patch_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_rule_unary( + compute.PatchRuleNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_patch_rule_unary_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveAssociationNetworkFirewallPolicyRequest, + dict, +]) +def test_remove_association_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_association_rest_required_fields(request_type=compute.RemoveAssociationNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.remove_association(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_remove_association_rest_unset_required_fields():
+    transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.remove_association._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("name", "requestId", )) & set(("firewallPolicy", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_remove_association_rest_interceptors(null_interceptor):
+    transport = transports.NetworkFirewallPoliciesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(),
+    )
+    client = NetworkFirewallPoliciesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_remove_association") as post, \
+        mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_remove_association") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.RemoveAssociationNetworkFirewallPolicyRequest.pb(compute.RemoveAssociationNetworkFirewallPolicyRequest())
+        transcode.return_value = {
+            "method": "post",
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveAssociationNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_association_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveAssociationNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_association(request) + + +def test_remove_association_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/removeAssociation" % client.transport._host, args[1]) + + +def test_remove_association_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_association( + compute.RemoveAssociationNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_remove_association_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveAssociationNetworkFirewallPolicyRequest, + dict, +]) +def test_remove_association_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_association_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_association_unary_rest_required_fields(request_type=compute.RemoveAssociationNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.remove_association_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_remove_association_unary_rest_unset_required_fields():
+    transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.remove_association._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("name", "requestId", )) & set(("firewallPolicy", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_remove_association_unary_rest_interceptors(null_interceptor):
+    transport = transports.NetworkFirewallPoliciesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(),
+    )
+    client = NetworkFirewallPoliciesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_remove_association") as post, \
+        mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_remove_association") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.RemoveAssociationNetworkFirewallPolicyRequest.pb(compute.RemoveAssociationNetworkFirewallPolicyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.RemoveAssociationNetworkFirewallPolicyRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.remove_association_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_remove_association_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveAssociationNetworkFirewallPolicyRequest):
+    client = NetworkFirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'firewall_policy': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_association_unary(request) + + +def test_remove_association_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_association_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/removeAssociation" % client.transport._host, args[1]) + + +def test_remove_association_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_association_unary( + compute.RemoveAssociationNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_remove_association_unary_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveRuleNetworkFirewallPolicyRequest, + dict, +]) +def test_remove_rule_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_rule_rest_required_fields(request_type=compute.RemoveRuleNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.remove_rule(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_remove_rule_rest_unset_required_fields():
+    transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.remove_rule._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_remove_rule_rest_interceptors(null_interceptor):
+    transport = transports.NetworkFirewallPoliciesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(),
+    )
+    client = NetworkFirewallPoliciesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_remove_rule") as post, \
+        mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_remove_rule") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.RemoveRuleNetworkFirewallPolicyRequest.pb(compute.RemoveRuleNetworkFirewallPolicyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.RemoveRuleNetworkFirewallPolicyRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.remove_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_remove_rule_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveRuleNetworkFirewallPolicyRequest):
+    client = NetworkFirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'firewall_policy': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.remove_rule(request)
+
+
+def test_remove_rule_rest_flattened():
+    client = NetworkFirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/removeRule" % client.transport._host, args[1]) + + +def test_remove_rule_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_rule( + compute.RemoveRuleNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_remove_rule_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveRuleNetworkFirewallPolicyRequest, + dict, +]) +def test_remove_rule_unary_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'firewall_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_rule_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_remove_rule_unary_rest_required_fields(request_type=compute.RemoveRuleNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.remove_rule_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_remove_rule_unary_rest_unset_required_fields():
+    transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.remove_rule._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_remove_rule_unary_rest_interceptors(null_interceptor):
+    transport = transports.NetworkFirewallPoliciesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(),
+    )
+    client = NetworkFirewallPoliciesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_remove_rule") as post, \
+        mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_remove_rule") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.RemoveRuleNetworkFirewallPolicyRequest.pb(compute.RemoveRuleNetworkFirewallPolicyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.RemoveRuleNetworkFirewallPolicyRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.remove_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_remove_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveRuleNetworkFirewallPolicyRequest):
+    client = NetworkFirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'firewall_policy': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule_unary(request) + + +def test_remove_rule_unary_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'firewall_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{firewall_policy}/removeRule" % client.transport._host, args[1]) + + +def test_remove_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_rule_unary( + compute.RemoveRuleNetworkFirewallPolicyRequest(), + project='project_value', + firewall_policy='firewall_policy_value', + ) + + +def test_remove_rule_unary_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyNetworkFirewallPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 
'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetPolicyRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyNetworkFirewallPolicyRequest.pb(compute.SetIamPolicyNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 
'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyNetworkFirewallPolicyRequest(), + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsNetworkFirewallPolicyRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsNetworkFirewallPolicyRequest): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.NetworkFirewallPoliciesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsNetworkFirewallPolicyRequest.pb(compute.TestIamPermissionsNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsNetworkFirewallPolicyRequest): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/firewallPolicies/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsNetworkFirewallPolicyRequest(), + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkFirewallPoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworkFirewallPoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworkFirewallPoliciesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworkFirewallPoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.NetworkFirewallPoliciesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = NetworkFirewallPoliciesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_network_firewall_policies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.NetworkFirewallPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_network_firewall_policies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.network_firewall_policies.transports.NetworkFirewallPoliciesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.NetworkFirewallPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_association', + 'add_rule', + 'clone_rules', + 'delete', + 'get', + 'get_association', + 'get_iam_policy', + 'get_rule', + 'insert', + 'list', + 'patch', + 'patch_rule', + 'remove_association', + 'remove_rule', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_network_firewall_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.network_firewall_policies.transports.NetworkFirewallPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworkFirewallPoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_network_firewall_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.network_firewall_policies.transports.NetworkFirewallPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworkFirewallPoliciesTransport() + adc.assert_called_once() + + +def test_network_firewall_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NetworkFirewallPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_network_firewall_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.NetworkFirewallPoliciesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_firewall_policies_host_no_port(transport_name): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_network_firewall_policies_host_with_port(transport_name): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_network_firewall_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = NetworkFirewallPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = NetworkFirewallPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_association._session + session2 = client2.transport.add_association._session + assert session1 != session2 + session1 = client1.transport.add_rule._session + session2 = client2.transport.add_rule._session + assert session1 != session2 + session1 = client1.transport.clone_rules._session + session2 = client2.transport.clone_rules._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_association._session + session2 = client2.transport.get_association._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.get_rule._session + session2 = client2.transport.get_rule._session + assert session1 != session2 + session1 = client1.transport.insert._session + 
session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.patch_rule._session + session2 = client2.transport.patch_rule._session + assert session1 != session2 + session1 = client1.transport.remove_association._session + session2 = client2.transport.remove_association._session + assert session1 != session2 + session1 = client1.transport.remove_rule._session + session2 = client2.transport.remove_rule._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = NetworkFirewallPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = NetworkFirewallPoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkFirewallPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = NetworkFirewallPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = NetworkFirewallPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkFirewallPoliciesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = NetworkFirewallPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = NetworkFirewallPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkFirewallPoliciesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = NetworkFirewallPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = NetworkFirewallPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworkFirewallPoliciesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = NetworkFirewallPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = NetworkFirewallPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = NetworkFirewallPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.NetworkFirewallPoliciesTransport, '_prep_wrapped_messages') as prep: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.NetworkFirewallPoliciesTransport, '_prep_wrapped_messages') as prep: + transport_class = NetworkFirewallPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = NetworkFirewallPoliciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (NetworkFirewallPoliciesClient, transports.NetworkFirewallPoliciesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_networks.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_networks.py new file mode 100644 index 000000000..2554aae96 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_networks.py @@ -0,0 +1,5804 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.networks import NetworksClient +from google.cloud.compute_v1.services.networks import pagers +from google.cloud.compute_v1.services.networks import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NetworksClient._get_default_mtls_endpoint(None) is None + assert NetworksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert NetworksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert NetworksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert NetworksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert NetworksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworksClient, "rest"), +]) +def test_networks_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.NetworksRestTransport, "rest"), +]) +def test_networks_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NetworksClient, "rest"), +]) +def test_networks_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_networks_client_get_transport_class(): + transport = NetworksClient.get_transport_class() + available_transports = [ + transports.NetworksRestTransport, + ] + assert transport in available_transports + + transport = NetworksClient.get_transport_class("rest") + assert transport == transports.NetworksRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworksClient, transports.NetworksRestTransport, "rest"), +]) +@mock.patch.object(NetworksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworksClient)) +def test_networks_client_client_options(client_class, transport_class, transport_name): + # 
Check that if channel is provided we won't create a new one. + with mock.patch.object(NetworksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NetworksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NetworksClient, transports.NetworksRestTransport, "rest", "true"), + (NetworksClient, transports.NetworksRestTransport, "rest", "false"), +]) +@mock.patch.object(NetworksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworksClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_networks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + NetworksClient +]) +@mock.patch.object(NetworksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NetworksClient)) +def test_networks_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NetworksClient, transports.NetworksRestTransport, "rest"), +]) +def test_networks_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (NetworksClient, transports.NetworksRestTransport, "rest", None), +]) +def test_networks_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddPeeringNetworkRequest, + dict, +]) +def test_add_peering_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_add_peering_request_resource"] = {'auto_create_routes': True, 'name': 'name_value', 'network_peering': {'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': 
True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}, 'peer_network': 'peer_network_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_peering(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_peering_rest_required_fields(request_type=compute.AddPeeringNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_peering._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_peering._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_peering(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_peering_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_peering._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "networksAddPeeringRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_peering_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_add_peering") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_add_peering") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddPeeringNetworkRequest.pb(compute.AddPeeringNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddPeeringNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_peering(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_peering_rest_bad_request(transport: str = 'rest', request_type=compute.AddPeeringNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_add_peering_request_resource"] = {'auto_create_routes': True, 'name': 'name_value', 'network_peering': {'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}, 'peer_network': 'peer_network_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_peering(request) + + +def test_add_peering_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + networks_add_peering_request_resource=compute.NetworksAddPeeringRequest(auto_create_routes=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_peering(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/addPeering" % client.transport._host, args[1]) + + +def test_add_peering_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_peering( + compute.AddPeeringNetworkRequest(), + project='project_value', + network='network_value', + networks_add_peering_request_resource=compute.NetworksAddPeeringRequest(auto_create_routes=True), + ) + + +def test_add_peering_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddPeeringNetworkRequest, + dict, +]) +def test_add_peering_unary_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_add_peering_request_resource"] = {'auto_create_routes': True, 'name': 'name_value', 'network_peering': {'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}, 'peer_network': 'peer_network_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_peering_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_peering_unary_rest_required_fields(request_type=compute.AddPeeringNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_peering._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_peering._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_peering_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_peering_unary_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_peering._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "networksAddPeeringRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_peering_unary_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NetworksRestInterceptor, "post_add_peering") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_add_peering") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddPeeringNetworkRequest.pb(compute.AddPeeringNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddPeeringNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_peering_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_peering_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddPeeringNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_add_peering_request_resource"] = {'auto_create_routes': True, 'name': 'name_value', 'network_peering': {'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}, 'peer_network': 'peer_network_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_peering_unary(request) + + +def test_add_peering_unary_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + networks_add_peering_request_resource=compute.NetworksAddPeeringRequest(auto_create_routes=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_peering_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/addPeering" % client.transport._host, args[1]) + + +def test_add_peering_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_peering_unary( + compute.AddPeeringNetworkRequest(), + project='project_value', + network='network_value', + networks_add_peering_request_resource=compute.NetworksAddPeeringRequest(auto_create_routes=True), + ) + + +def test_add_peering_unary_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNetworkRequest, + dict, +]) +def test_delete_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 
'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkRequest.pb(compute.DeleteNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete( + compute.DeleteNetworkRequest(), + project='project_value', + network='network_value', + ) + + +def test_delete_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNetworkRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.NetworksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNetworkRequest.pb(compute.DeleteNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteNetworkRequest(), + project='project_value', + network='network_value', + ) + + +def test_delete_unary_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetNetworkRequest, + dict, +]) +def test_get_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Network( + I_pv4_range='I_pv4_range_value', + auto_create_subnetworks=True, + creation_timestamp='creation_timestamp_value', + description='description_value', + enable_ula_internal_ipv6=True, + firewall_policy='firewall_policy_value', + gateway_i_pv4='gateway_i_pv4_value', + id=205, + internal_ipv6_range='internal_ipv6_range_value', + kind='kind_value', + mtu=342, + name='name_value', + network_firewall_policy_enforcement_order='network_firewall_policy_enforcement_order_value', + self_link='self_link_value', + self_link_with_id='self_link_with_id_value', + subnetworks=['subnetworks_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Network.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Network) + assert response.I_pv4_range == 'I_pv4_range_value' + assert response.auto_create_subnetworks is True + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.enable_ula_internal_ipv6 is True + assert response.firewall_policy == 'firewall_policy_value' + assert response.gateway_i_pv4 == 'gateway_i_pv4_value' + assert response.id == 205 + assert response.internal_ipv6_range == 'internal_ipv6_range_value' + assert response.kind == 'kind_value' + assert response.mtu == 342 + assert response.name == 'name_value' + assert response.network_firewall_policy_enforcement_order == 'network_firewall_policy_enforcement_order_value' + assert response.self_link == 'self_link_value' + assert response.self_link_with_id == 'self_link_with_id_value' + assert response.subnetworks == ['subnetworks_value'] + + +def test_get_rest_required_fields(request_type=compute.GetNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values 
are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Network() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Network.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("network", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = 
transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetNetworkRequest.pb(compute.GetNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Network.to_json(compute.Network()) + + request = compute.GetNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Network() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Network() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Network.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetNetworkRequest(), + project='project_value', + network='network_value', + ) + + +def test_get_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetEffectiveFirewallsNetworkRequest, + dict, +]) +def test_get_effective_firewalls_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworksGetEffectiveFirewallsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworksGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_effective_firewalls(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.NetworksGetEffectiveFirewallsResponse) + + +def test_get_effective_firewalls_rest_required_fields(request_type=compute.GetEffectiveFirewallsNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_effective_firewalls._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_effective_firewalls._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworksGetEffectiveFirewallsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NetworksGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_effective_firewalls(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_effective_firewalls_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_effective_firewalls._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("network", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_effective_firewalls_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, 
"post_get_effective_firewalls") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_get_effective_firewalls") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetEffectiveFirewallsNetworkRequest.pb(compute.GetEffectiveFirewallsNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworksGetEffectiveFirewallsResponse.to_json(compute.NetworksGetEffectiveFirewallsResponse()) + + request = compute.GetEffectiveFirewallsNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworksGetEffectiveFirewallsResponse() + + client.get_effective_firewalls(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_effective_firewalls_rest_bad_request(transport: str = 'rest', request_type=compute.GetEffectiveFirewallsNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_effective_firewalls(request) + + +def test_get_effective_firewalls_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworksGetEffectiveFirewallsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NetworksGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_effective_firewalls(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/getEffectiveFirewalls" % client.transport._host, args[1]) + + +def test_get_effective_firewalls_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_effective_firewalls( + compute.GetEffectiveFirewallsNetworkRequest(), + project='project_value', + network='network_value', + ) + + +def test_get_effective_firewalls_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNetworkRequest, + dict, +]) +def test_insert_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["network_resource"] = {'I_pv4_range': 'I_pv4_range_value', 'auto_create_subnetworks': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_ula_internal_ipv6': True, 'firewall_policy': 'firewall_policy_value', 'gateway_i_pv4': 'gateway_i_pv4_value', 'id': 205, 'internal_ipv6_range': 'internal_ipv6_range_value', 'kind': 'kind_value', 'mtu': 342, 'name': 'name_value', 'network_firewall_policy_enforcement_order': 'network_firewall_policy_enforcement_order_value', 'peerings': [{'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 
'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}], 'routing_config': {'routing_mode': 'routing_mode_value'}, 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkRequest.pb(compute.InsertNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["network_resource"] = {'I_pv4_range': 'I_pv4_range_value', 'auto_create_subnetworks': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_ula_internal_ipv6': True, 'firewall_policy': 'firewall_policy_value', 'gateway_i_pv4': 'gateway_i_pv4_value', 'id': 205, 'internal_ipv6_range': 'internal_ipv6_range_value', 'kind': 'kind_value', 'mtu': 342, 'name': 'name_value', 'network_firewall_policy_enforcement_order': 'network_firewall_policy_enforcement_order_value', 'peerings': [{'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}], 'routing_config': {'routing_mode': 'routing_mode_value'}, 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_resource=compute.Network(I_pv4_range='I_pv4_range_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    # (continuation of test_insert_rest_flattened_error) request object and
    # flattened fields are mutually exclusive.
    with pytest.raises(ValueError):
        client.insert(
            compute.InsertNetworkRequest(),
            project='project_value',
            network_resource=compute.Network(I_pv4_range='I_pv4_range_value'),
        )


def test_insert_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    client = NetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertNetworkRequest,
    dict,
])
def test_insert_unary_rest(request_type):
    """Exercise ``insert_unary`` over REST with a mocked HTTP session.

    Parametrized over both the proto request type and a plain ``dict`` to
    cover both accepted request forms.  The response body is a serialized
    ``compute.Operation``; the continuation (outside this span) asserts the
    decoded response type.
    """
    client = NetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["network_resource"] = {'I_pv4_range': 'I_pv4_range_value', 'auto_create_subnetworks': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_ula_internal_ipv6': True, 'firewall_policy': 'firewall_policy_value', 'gateway_i_pv4': 'gateway_i_pv4_value', 'id': 205, 'internal_ipv6_range': 'internal_ipv6_range_value', 'kind': 'kind_value', 'mtu': 342, 'name': 'name_value', 'network_firewall_policy_enforcement_order': 'network_firewall_policy_enforcement_order_value', 'peerings': [{'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}], 'routing_config': {'routing_mode': 'routing_mode_value'}, 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNetworkRequest.pb(compute.InsertNetworkRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["network_resource"] = {'I_pv4_range': 'I_pv4_range_value', 'auto_create_subnetworks': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_ula_internal_ipv6': True, 'firewall_policy': 'firewall_policy_value', 'gateway_i_pv4': 'gateway_i_pv4_value', 'id': 205, 'internal_ipv6_range': 'internal_ipv6_range_value', 'kind': 'kind_value', 'mtu': 342, 'name': 'name_value', 'network_firewall_policy_enforcement_order': 'network_firewall_policy_enforcement_order_value', 'peerings': [{'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}], 'routing_config': {'routing_mode': 'routing_mode_value'}, 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 
'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network_resource=compute.Network(I_pv4_range='I_pv4_range_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
        # (continuation of test_insert_unary_rest_flattened) exactly one HTTP
        # call, routed to the insert URL; args[1] is the URI passed to the session.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/networks" % client.transport._host, args[1])


def test_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Supplying both a request object and flattened fields must raise ValueError."""
    client = NetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert_unary(
            compute.InsertNetworkRequest(),
            project='project_value',
            network_resource=compute.Network(I_pv4_range='I_pv4_range_value'),
        )


def test_insert_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    client = NetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.ListNetworksRequest,
    dict,
])
def test_list_rest(request_type):
    """Exercise ``list`` over REST with a mocked HTTP session.

    Parametrized over both the proto request type and a plain ``dict``.
    The response body is a serialized ``compute.NetworkList``; the
    continuation (outside this span) asserts the decoded pager fields.
    """
    client = NetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NetworkList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListNetworksRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkList() + # Mock the http request call within the method and fake a response. 
    # (continuation of test_list_rest_required_fields) issue the call with both
    # the HTTP session and transcode() mocked out.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.NetworkList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list(request)

            # No required field should leak into the query params.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_rest_unset_required_fields():
    """The transport's unset-required-fields set for ``list`` must be the
    intersection of its optional query params and its required fields."""
    # NOTE(review): AnonymousCredentials is passed as the class, not an
    # instance (no parentheses) — presumably tolerated by the transport
    # constructor; confirm against NetworksRestTransport.
    transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
    """pre/post interceptors on the REST transport run exactly once around a
    ``list`` call (also exercised with no interceptor installed)."""
    transport = transports.NetworksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.NetworksRestInterceptor(),
    )
    client = NetworksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.NetworksRestInterceptor, "post_list") as post, \
mock.patch.object(transports.NetworksRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListNetworksRequest.pb(compute.ListNetworksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NetworkList.to_json(compute.NetworkList()) + + request = compute.ListNetworksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListNetworksRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
        # (continuation of test_list_rest_flattened) fake response payload.
        return_value = compute.NetworkList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.  args[1] is the URI passed to the session.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/networks" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    """Supplying both a request object and flattened fields must raise ValueError."""
    client = NetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListNetworksRequest(),
            project='project_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    """Paginated ``list`` responses are stitched together by the pager: the
    continuation (outside this span) feeds four pages twice and checks item
    count and per-page tokens."""
    client = NetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkList( + items=[ + compute.Network(), + compute.Network(), + compute.Network(), + ], + next_page_token='abc', + ), + compute.NetworkList( + items=[], + next_page_token='def', + ), + compute.NetworkList( + items=[ + compute.Network(), + ], + next_page_token='ghi', + ), + compute.NetworkList( + items=[ + compute.Network(), + compute.Network(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NetworkList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Network) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListPeeringRoutesNetworksRequest, + dict, +]) +def test_list_peering_routes_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ExchangedPeeringRoutesList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ExchangedPeeringRoutesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_peering_routes(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPeeringRoutesPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_peering_routes_rest_required_fields(request_type=compute.ListPeeringRoutesNetworksRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_peering_routes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_peering_routes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("direction", "filter", "max_results", "order_by", "page_token", "peering_name", "region", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ExchangedPeeringRoutesList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
            # (continuation of test_list_peering_routes_rest_required_fields)
            # force all request fields into query_params via a field-less URI.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.ExchangedPeeringRoutesList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_peering_routes(request)

            # No required field should leak into the query params.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_peering_routes_rest_unset_required_fields():
    """The transport's unset-required-fields set for ``list_peering_routes``
    must be the intersection of its optional query params and its required
    fields."""
    # NOTE(review): AnonymousCredentials is passed as the class, not an
    # instance (no parentheses) — presumably tolerated by the transport
    # constructor; confirm against NetworksRestTransport.
    transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list_peering_routes._get_unset_required_fields({})
    assert set(unset_fields) == (set(("direction", "filter", "maxResults", "orderBy", "pageToken", "peeringName", "region", "returnPartialSuccess", )) & set(("network", "project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_peering_routes_rest_interceptors(null_interceptor):
    """pre/post interceptors on the REST transport run exactly once around a
    ``list_peering_routes`` call (also exercised with no interceptor)."""
    transport = transports.NetworksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.NetworksRestInterceptor(),
    )
    client = NetworksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.NetworksRestInterceptor, "post_list_peering_routes") as post, \
        mock.patch.object(transports.NetworksRestInterceptor, "pre_list_peering_routes") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListPeeringRoutesNetworksRequest.pb(compute.ListPeeringRoutesNetworksRequest())
        transcode.return_value = {
            "method": "post",
            "uri":
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ExchangedPeeringRoutesList.to_json(compute.ExchangedPeeringRoutesList()) + + request = compute.ListPeeringRoutesNetworksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ExchangedPeeringRoutesList() + + client.list_peering_routes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_peering_routes_rest_bad_request(transport: str = 'rest', request_type=compute.ListPeeringRoutesNetworksRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_peering_routes(request) + + +def test_list_peering_routes_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ExchangedPeeringRoutesList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ExchangedPeeringRoutesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_peering_routes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/listPeeringRoutes" % client.transport._host, args[1]) + + +def test_list_peering_routes_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_peering_routes( + compute.ListPeeringRoutesNetworksRequest(), + project='project_value', + network='network_value', + ) + + +def test_list_peering_routes_rest_pager(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ExchangedPeeringRoutesList( + items=[ + compute.ExchangedPeeringRoute(), + compute.ExchangedPeeringRoute(), + compute.ExchangedPeeringRoute(), + ], + next_page_token='abc', + ), + compute.ExchangedPeeringRoutesList( + items=[], + next_page_token='def', + ), + compute.ExchangedPeeringRoutesList( + items=[ + compute.ExchangedPeeringRoute(), + ], + next_page_token='ghi', + ), + compute.ExchangedPeeringRoutesList( + items=[ + compute.ExchangedPeeringRoute(), + compute.ExchangedPeeringRoute(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ExchangedPeeringRoutesList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'network': 'sample2'} + + pager = client.list_peering_routes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.ExchangedPeeringRoute) + for i in results) + + pages = list(client.list_peering_routes(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchNetworkRequest, + dict, +]) +def test_patch_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["network_resource"] = {'I_pv4_range': 'I_pv4_range_value', 'auto_create_subnetworks': True, 'creation_timestamp': 
'creation_timestamp_value', 'description': 'description_value', 'enable_ula_internal_ipv6': True, 'firewall_policy': 'firewall_policy_value', 'gateway_i_pv4': 'gateway_i_pv4_value', 'id': 205, 'internal_ipv6_range': 'internal_ipv6_range_value', 'kind': 'kind_value', 'mtu': 342, 'name': 'name_value', 'network_firewall_policy_enforcement_order': 'network_firewall_policy_enforcement_order_value', 'peerings': [{'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}], 'routing_config': {'routing_mode': 'routing_mode_value'}, 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 
'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "networkResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNetworkRequest.pb(compute.PatchNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["network_resource"] = {'I_pv4_range': 'I_pv4_range_value', 'auto_create_subnetworks': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_ula_internal_ipv6': True, 'firewall_policy': 'firewall_policy_value', 'gateway_i_pv4': 'gateway_i_pv4_value', 'id': 205, 'internal_ipv6_range': 'internal_ipv6_range_value', 'kind': 'kind_value', 'mtu': 342, 'name': 'name_value', 'network_firewall_policy_enforcement_order': 'network_firewall_policy_enforcement_order_value', 'peerings': [{'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}], 'routing_config': {'routing_mode': 'routing_mode_value'}, 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + network_resource=compute.Network(I_pv4_range='I_pv4_range_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchNetworkRequest(), + project='project_value', + network='network_value', + network_resource=compute.Network(I_pv4_range='I_pv4_range_value'), + ) + + +def test_patch_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchNetworkRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["network_resource"] = {'I_pv4_range': 'I_pv4_range_value', 'auto_create_subnetworks': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_ula_internal_ipv6': True, 'firewall_policy': 'firewall_policy_value', 'gateway_i_pv4': 'gateway_i_pv4_value', 'id': 205, 'internal_ipv6_range': 'internal_ipv6_range_value', 'kind': 'kind_value', 'mtu': 342, 'name': 'name_value', 'network_firewall_policy_enforcement_order': 'network_firewall_policy_enforcement_order_value', 'peerings': [{'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}], 'routing_config': {'routing_mode': 'routing_mode_value'}, 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "networkResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_patch") as post, \ + 
mock.patch.object(transports.NetworksRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNetworkRequest.pb(compute.PatchNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["network_resource"] = {'I_pv4_range': 'I_pv4_range_value', 'auto_create_subnetworks': True, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_ula_internal_ipv6': True, 'firewall_policy': 'firewall_policy_value', 'gateway_i_pv4': 'gateway_i_pv4_value', 'id': 205, 'internal_ipv6_range': 'internal_ipv6_range_value', 'kind': 'kind_value', 'mtu': 342, 'name': 'name_value', 'network_firewall_policy_enforcement_order': 'network_firewall_policy_enforcement_order_value', 'peerings': [{'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 
865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}], 'routing_config': {'routing_mode': 'routing_mode_value'}, 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'subnetworks': ['subnetworks_value1', 'subnetworks_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + network_resource=compute.Network(I_pv4_range='I_pv4_range_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchNetworkRequest(), + project='project_value', + network='network_value', + network_resource=compute.Network(I_pv4_range='I_pv4_range_value'), + ) + + +def test_patch_unary_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemovePeeringNetworkRequest, + dict, +]) +def test_remove_peering_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_remove_peering_request_resource"] = {'name': 'name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_peering(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_peering_rest_required_fields(request_type=compute.RemovePeeringNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_peering._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_peering._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_peering(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_peering_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_peering._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "networksRemovePeeringRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_peering_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_remove_peering") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_remove_peering") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemovePeeringNetworkRequest.pb(compute.RemovePeeringNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemovePeeringNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_peering(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_peering_rest_bad_request(transport: str = 'rest', request_type=compute.RemovePeeringNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_remove_peering_request_resource"] = {'name': 'name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_peering(request) + + +def test_remove_peering_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + networks_remove_peering_request_resource=compute.NetworksRemovePeeringRequest(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_peering(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/removePeering" % client.transport._host, args[1]) + + +def test_remove_peering_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_peering( + compute.RemovePeeringNetworkRequest(), + project='project_value', + network='network_value', + networks_remove_peering_request_resource=compute.NetworksRemovePeeringRequest(name='name_value'), + ) + + +def test_remove_peering_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemovePeeringNetworkRequest, + dict, +]) +def test_remove_peering_unary_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_remove_peering_request_resource"] = {'name': 'name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_peering_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_peering_unary_rest_required_fields(request_type=compute.RemovePeeringNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_peering._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_peering._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_peering_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_peering_unary_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_peering._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "networksRemovePeeringRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_peering_unary_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NetworksRestInterceptor, "post_remove_peering") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_remove_peering") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemovePeeringNetworkRequest.pb(compute.RemovePeeringNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemovePeeringNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_peering_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_peering_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemovePeeringNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_remove_peering_request_resource"] = {'name': 'name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_peering_unary(request) + + +def test_remove_peering_unary_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + networks_remove_peering_request_resource=compute.NetworksRemovePeeringRequest(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_peering_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/removePeering" % client.transport._host, args[1]) + + +def test_remove_peering_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_peering_unary( + compute.RemovePeeringNetworkRequest(), + project='project_value', + network='network_value', + networks_remove_peering_request_resource=compute.NetworksRemovePeeringRequest(name='name_value'), + ) + + +def test_remove_peering_unary_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SwitchToCustomModeNetworkRequest, + dict, +]) +def test_switch_to_custom_mode_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.switch_to_custom_mode(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_switch_to_custom_mode_rest_required_fields(request_type=compute.SwitchToCustomModeNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).switch_to_custom_mode._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 
'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).switch_to_custom_mode._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.switch_to_custom_mode(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_switch_to_custom_mode_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.switch_to_custom_mode._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_switch_to_custom_mode_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_switch_to_custom_mode") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_switch_to_custom_mode") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SwitchToCustomModeNetworkRequest.pb(compute.SwitchToCustomModeNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SwitchToCustomModeNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.switch_to_custom_mode(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_switch_to_custom_mode_rest_bad_request(transport: str = 'rest', request_type=compute.SwitchToCustomModeNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.switch_to_custom_mode(request) + + +def test_switch_to_custom_mode_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.switch_to_custom_mode(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/switchToCustomMode" % client.transport._host, args[1]) + + +def test_switch_to_custom_mode_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.switch_to_custom_mode( + compute.SwitchToCustomModeNetworkRequest(), + project='project_value', + network='network_value', + ) + + +def test_switch_to_custom_mode_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SwitchToCustomModeNetworkRequest, + dict, +]) +def test_switch_to_custom_mode_unary_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.switch_to_custom_mode_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_switch_to_custom_mode_unary_rest_required_fields(request_type=compute.SwitchToCustomModeNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).switch_to_custom_mode._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).switch_to_custom_mode._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.switch_to_custom_mode_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_switch_to_custom_mode_unary_rest_unset_required_fields(): + transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.switch_to_custom_mode._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("network", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_switch_to_custom_mode_unary_rest_interceptors(null_interceptor): + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NetworksRestInterceptor(), + ) + client = NetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.NetworksRestInterceptor, "post_switch_to_custom_mode") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_switch_to_custom_mode") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SwitchToCustomModeNetworkRequest.pb(compute.SwitchToCustomModeNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SwitchToCustomModeNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.switch_to_custom_mode_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_switch_to_custom_mode_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SwitchToCustomModeNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.switch_to_custom_mode_unary(request) + + +def test_switch_to_custom_mode_unary_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.switch_to_custom_mode_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/switchToCustomMode" % client.transport._host, args[1]) + + +def test_switch_to_custom_mode_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.switch_to_custom_mode_unary( + compute.SwitchToCustomModeNetworkRequest(), + project='project_value', + network='network_value', + ) + + +def test_switch_to_custom_mode_unary_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdatePeeringNetworkRequest, + dict, +]) +def test_update_peering_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_update_peering_request_resource"] = {'network_peering': {'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_peering(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_peering_rest_required_fields(request_type=compute.UpdatePeeringNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_peering._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + 
jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_peering._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_peering(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_peering_rest_unset_required_fields():
+    transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_peering._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("network", "networksUpdatePeeringRequestResource", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_peering_rest_interceptors(null_interceptor):
+    transport = transports.NetworksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NetworksRestInterceptor(),
+        )
+    client = NetworksClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NetworksRestInterceptor, "post_update_peering") as post, \
+        mock.patch.object(transports.NetworksRestInterceptor, "pre_update_peering") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.UpdatePeeringNetworkRequest.pb(compute.UpdatePeeringNetworkRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdatePeeringNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_peering(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_peering_rest_bad_request(transport: str = 'rest', request_type=compute.UpdatePeeringNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_update_peering_request_resource"] = {'network_peering': {'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_peering(request) + + +def test_update_peering_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + networks_update_peering_request_resource=compute.NetworksUpdatePeeringRequest(network_peering=compute.NetworkPeering(auto_create_routes=True)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_peering(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/updatePeering" % client.transport._host, args[1]) + + +def test_update_peering_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_peering( + compute.UpdatePeeringNetworkRequest(), + project='project_value', + network='network_value', + networks_update_peering_request_resource=compute.NetworksUpdatePeeringRequest(network_peering=compute.NetworkPeering(auto_create_routes=True)), + ) + + +def test_update_peering_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdatePeeringNetworkRequest, + dict, +]) +def test_update_peering_unary_rest(request_type): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_update_peering_request_resource"] = {'network_peering': {'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_peering_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_peering_unary_rest_required_fields(request_type=compute.UpdatePeeringNetworkRequest): + transport_class = transports.NetworksRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_peering._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_peering._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_peering_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_peering_unary_rest_unset_required_fields():
+    transport = transports.NetworksRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_peering._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("network", "networksUpdatePeeringRequestResource", "project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_peering_unary_rest_interceptors(null_interceptor):
+    transport = transports.NetworksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NetworksRestInterceptor(),
+        )
+    client = NetworksClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
mock.patch.object(transports.NetworksRestInterceptor, "post_update_peering") as post, \ + mock.patch.object(transports.NetworksRestInterceptor, "pre_update_peering") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdatePeeringNetworkRequest.pb(compute.UpdatePeeringNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdatePeeringNetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_peering_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_peering_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdatePeeringNetworkRequest): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'network': 'sample2'} + request_init["networks_update_peering_request_resource"] = {'network_peering': {'auto_create_routes': True, 'exchange_subnet_routes': True, 'export_custom_routes': True, 'export_subnet_routes_with_public_ip': True, 'import_custom_routes': True, 'import_subnet_routes_with_public_ip': True, 'name': 'name_value', 'network': 'network_value', 'peer_mtu': 865, 'stack_type': 'stack_type_value', 'state': 'state_value', 'state_details': 'state_details_value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_peering_unary(request) + + +def test_update_peering_unary_rest_flattened(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'network': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + network='network_value', + networks_update_peering_request_resource=compute.NetworksUpdatePeeringRequest(network_peering=compute.NetworkPeering(auto_create_routes=True)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_peering_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/networks/{network}/updatePeering" % client.transport._host, args[1]) + + +def test_update_peering_unary_rest_flattened_error(transport: str = 'rest'): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_peering_unary( + compute.UpdatePeeringNetworkRequest(), + project='project_value', + network='network_value', + networks_update_peering_request_resource=compute.NetworksUpdatePeeringRequest(network_peering=compute.NetworkPeering(auto_create_routes=True)), + ) + + +def test_update_peering_unary_rest_error(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NetworksClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NetworksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NetworksClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.NetworksRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = NetworksClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_networks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.NetworksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_networks_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.networks.transports.NetworksTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.NetworksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_peering', + 'delete', + 'get', + 'get_effective_firewalls', + 'insert', + 'list', + 'list_peering_routes', + 'patch', + 'remove_peering', + 'switch_to_custom_mode', + 'update_peering', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_networks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.networks.transports.NetworksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_networks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.networks.transports.NetworksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NetworksTransport() + adc.assert_called_once() + + +def test_networks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NetworksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_networks_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.NetworksRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_networks_host_no_port(transport_name): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_networks_host_with_port(transport_name): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_networks_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = 
NetworksClient( + credentials=creds1, + transport=transport_name, + ) + client2 = NetworksClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_peering._session + session2 = client2.transport.add_peering._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_effective_firewalls._session + session2 = client2.transport.get_effective_firewalls._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_peering_routes._session + session2 = client2.transport.list_peering_routes._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.remove_peering._session + session2 = client2.transport.remove_peering._session + assert session1 != session2 + session1 = client1.transport.switch_to_custom_mode._session + session2 = client2.transport.switch_to_custom_mode._session + assert session1 != session2 + session1 = client1.transport.update_peering._session + session2 = client2.transport.update_peering._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = NetworksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + 
path = NetworksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = NetworksClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = NetworksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = NetworksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NetworksClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = NetworksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = NetworksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = NetworksClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = NetworksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = NetworksClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NetworksClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = NetworksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = NetworksClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = NetworksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.NetworksTransport, '_prep_wrapped_messages') as prep: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.NetworksTransport, '_prep_wrapped_messages') as prep: + transport_class = NetworksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (NetworksClient, transports.NetworksRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_groups.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_groups.py new file mode 100644 index 000000000..ac2d38e89 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_groups.py @@ -0,0 +1,6771 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
# Standard-library imports.
import json
import math
import os

# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

from collections.abc import Iterable

# Third-party imports.
import grpc
from grpc.experimental import aio
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Request, PreparedRequest, Response
from requests.sessions import Session

# Google / project imports.  (Fix: json_format was previously imported twice.)
from google.protobuf import json_format
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.node_groups import NodeGroupsClient
from google.cloud.compute_v1.services.node_groups import pagers
from google.cloud.compute_v1.services.node_groups import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    """Return a dummy (cert_bytes, key_bytes) pair for client-certificate tests."""
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NodeGroupsClient._get_default_mtls_endpoint(None) is None + assert NodeGroupsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert NodeGroupsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert NodeGroupsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert NodeGroupsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert NodeGroupsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NodeGroupsClient, "rest"), +]) +def test_node_groups_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.NodeGroupsRestTransport, "rest"), +]) +def test_node_groups_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as 
use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NodeGroupsClient, "rest"), +]) +def test_node_groups_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_node_groups_client_get_transport_class(): + transport = NodeGroupsClient.get_transport_class() + available_transports = [ + transports.NodeGroupsRestTransport, + ] + assert transport in available_transports + + transport = NodeGroupsClient.get_transport_class("rest") + assert transport == transports.NodeGroupsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NodeGroupsClient, transports.NodeGroupsRestTransport, "rest"), +]) +@mock.patch.object(NodeGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupsClient)) +def 
test_node_groups_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NodeGroupsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NodeGroupsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NodeGroupsClient, transports.NodeGroupsRestTransport, "rest", "true"), + (NodeGroupsClient, transports.NodeGroupsRestTransport, "rest", "false"), +]) +@mock.patch.object(NodeGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_node_groups_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + NodeGroupsClient +]) +@mock.patch.object(NodeGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupsClient)) +def test_node_groups_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NodeGroupsClient, transports.NodeGroupsRestTransport, "rest"), +]) +def test_node_groups_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (NodeGroupsClient, transports.NodeGroupsRestTransport, "rest", None), +]) +def test_node_groups_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddNodesNodeGroupRequest, + dict, +]) +def test_add_nodes_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_add_nodes_request_resource"] = {'additional_node_count': 2214} + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_nodes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_nodes_rest_required_fields(request_type=compute.AddNodesNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_nodes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = 
'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_nodes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_nodes(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_nodes_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_nodes._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupsAddNodesRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_nodes_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_add_nodes") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_add_nodes") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddNodesNodeGroupRequest.pb(compute.AddNodesNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddNodesNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_nodes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_nodes_rest_bad_request(transport: str = 'rest', request_type=compute.AddNodesNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_add_nodes_request_resource"] = {'additional_node_count': 2214} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_nodes(request) + + +def test_add_nodes_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_add_nodes_request_resource=compute.NodeGroupsAddNodesRequest(additional_node_count=2214), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_nodes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/addNodes" % client.transport._host, args[1]) + + +def test_add_nodes_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    with pytest.raises(ValueError):
        client.add_nodes(
            compute.AddNodesNodeGroupRequest(),
            project='project_value',
            zone='zone_value',
            node_group='node_group_value',
            node_groups_add_nodes_request_resource=compute.NodeGroupsAddNodesRequest(additional_node_count=2214),
        )


def test_add_nodes_rest_error():
    """Smoke test: a REST-transport client constructs cleanly with anonymous credentials."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.AddNodesNodeGroupRequest,
    dict,
])
def test_add_nodes_unary_rest(request_type):
    """add_nodes_unary over REST returns the raw compute.Operation (no LRO polling wrapper)."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}
    request_init["node_groups_add_nodes_request_resource"] = {'additional_node_count': 2214}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.add_nodes_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_add_nodes_unary_rest_required_fields(request_type=compute.AddNodesNodeGroupRequest):
    """Required fields (nodeGroup/project/zone) survive transcoding; only request_id may remain unset."""
    transport_class = transports.NodeGroupsRestTransport

    request_init = {}
    request_init["node_group"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_nodes._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["nodeGroup"] = 'node_group_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_nodes._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "nodeGroup" in jsonified_request
    assert jsonified_request["nodeGroup"] == 'node_group_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.add_nodes_unary(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_add_nodes_unary_rest_unset_required_fields():
    """The unset-required-fields helper reports exactly the camelCase required keys minus query params."""
    transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.add_nodes._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupsAddNodesRequestResource", "project", "zone", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_add_nodes_unary_rest_interceptors(null_interceptor):
    """pre_add_nodes / post_add_nodes interceptors each fire exactly once around the call."""
    transport = transports.NodeGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(),
    )
    client = NodeGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.NodeGroupsRestInterceptor, "post_add_nodes") as post, \
         mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_add_nodes") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AddNodesNodeGroupRequest.pb(compute.AddNodesNodeGroupRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.AddNodesNodeGroupRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.add_nodes_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_add_nodes_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddNodesNodeGroupRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}
    request_init["node_groups_add_nodes_request_resource"] = {'additional_node_count': 2214}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.add_nodes_unary(request)


def test_add_nodes_unary_rest_flattened():
    """Flattened keyword arguments transcode onto the .../addNodes URL."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            node_group='node_group_value',
            node_groups_add_nodes_request_resource=compute.NodeGroupsAddNodesRequest(additional_node_count=2214),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.add_nodes_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/addNodes" % client.transport._host, args[1])


def test_add_nodes_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.add_nodes_unary(
            compute.AddNodesNodeGroupRequest(),
            project='project_value',
            zone='zone_value',
            node_group='node_group_value',
            node_groups_add_nodes_request_resource=compute.NodeGroupsAddNodesRequest(additional_node_count=2214),
        )


def test_add_nodes_unary_rest_error():
    """Smoke test: a REST-transport client constructs cleanly with anonymous credentials."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.AggregatedListNodeGroupsRequest,
    dict,
])
def test_aggregated_list_rest(request_type):
    """aggregated_list over REST returns an AggregatedListPager exposing list metadata."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NodeGroupAggregatedList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
            unreachables=['unreachables_value'],
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NodeGroupAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.aggregated_list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AggregatedListPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'
    assert response.unreachables == ['unreachables_value']


def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListNodeGroupsRequest):
    """Required field (project) survives transcoding; only paging/filter query params may stay unset."""
    transport_class = transports.NodeGroupsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.NodeGroupAggregatedList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.NodeGroupAggregatedList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.aggregated_list(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_aggregated_list_rest_unset_required_fields():
    """The unset-required-fields helper reports only camelCase query params, never the path param."""
    transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.aggregated_list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_aggregated_list_rest_interceptors(null_interceptor):
    """pre_aggregated_list / post_aggregated_list interceptors each fire exactly once around the call."""
    transport = transports.NodeGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(),
    )
    client = NodeGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.NodeGroupsRestInterceptor, "post_aggregated_list") as post, \
         mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_aggregated_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AggregatedListNodeGroupsRequest.pb(compute.AggregatedListNodeGroupsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.NodeGroupAggregatedList.to_json(compute.NodeGroupAggregatedList())

        request = compute.AggregatedListNodeGroupsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.NodeGroupAggregatedList()

        client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListNodeGroupsRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.aggregated_list(request)


def test_aggregated_list_rest_flattened():
    """Flattened keyword arguments transcode onto the aggregated/nodeGroups URL."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NodeGroupAggregatedList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NodeGroupAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.aggregated_list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/nodeGroups" % client.transport._host, args[1])


def test_aggregated_list_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.aggregated_list(
            compute.AggregatedListNodeGroupsRequest(),
            project='project_value',
        )


def test_aggregated_list_rest_pager(transport: str = 'rest'):
    """The pager walks all pages, yields (scope, NodeGroupsScopedList) tuples, and tracks page tokens."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.NodeGroupAggregatedList(
                items={
                    'a':compute.NodeGroupsScopedList(),
                    'b':compute.NodeGroupsScopedList(),
                    'c':compute.NodeGroupsScopedList(),
                },
                next_page_token='abc',
            ),
            compute.NodeGroupAggregatedList(
                items={},
                next_page_token='def',
            ),
            compute.NodeGroupAggregatedList(
                items={
                    'g':compute.NodeGroupsScopedList(),
                },
                next_page_token='ghi',
            ),
            compute.NodeGroupAggregatedList(
                items={
                    'h':compute.NodeGroupsScopedList(),
                    'i':compute.NodeGroupsScopedList(),
                },
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.NodeGroupAggregatedList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1'}

        pager = client.aggregated_list(request=sample_request)

        # get() hits only the current (first) page, so 'h' is not visible yet.
        assert isinstance(pager.get('a'), compute.NodeGroupsScopedList)
        assert pager.get('h') is None

        results = list(pager)
        assert len(results) == 6
        assert all(
            isinstance(i, tuple)
            for i in results)
        for result in results:
            assert isinstance(result, tuple)
            assert tuple(type(t) for t in result) == (str, compute.NodeGroupsScopedList)

        # After exhausting the pager, get() reflects the last page instead.
        assert pager.get('a') is None
        assert isinstance(pager.get('h'), compute.NodeGroupsScopedList)

        pages = list(client.aggregated_list(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [
    compute.DeleteNodeGroupRequest,
    dict,
])
def test_delete_rest(request_type):
    """delete over REST returns an ExtendedOperation mirroring every Operation field."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_delete_rest_required_fields(request_type=compute.DeleteNodeGroupRequest):
    """Required fields (nodeGroup/project/zone) survive transcoding; only request_id may remain unset."""
    transport_class = transports.NodeGroupsRestTransport

    request_init = {}
    request_init["node_group"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["nodeGroup"] = 'node_group_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "nodeGroup" in jsonified_request
    assert jsonified_request["nodeGroup"] == 'node_group_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_delete_rest_unset_required_fields():
    """The unset-required-fields helper reports exactly requestId minus the path params."""
    transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.delete._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "project", "zone", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_rest_interceptors(null_interceptor):
    """pre_delete / post_delete interceptors each fire exactly once around the call."""
    transport = transports.NodeGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(),
    )
    client = NodeGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.NodeGroupsRestInterceptor, "post_delete") as post, \
         mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_delete") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.DeleteNodeGroupRequest.pb(compute.DeleteNodeGroupRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.DeleteNodeGroupRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNodeGroupRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete(request)


def test_delete_rest_flattened():
    """Flattened keyword arguments transcode onto the nodeGroups/{node_group} URL."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            zone='zone_value',
            node_group='node_group_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.delete(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}" % client.transport._host, args[1])


def test_delete_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete(
            compute.DeleteNodeGroupRequest(),
            project='project_value',
            zone='zone_value',
            node_group='node_group_value',
        )


def test_delete_rest_error():
    """Smoke test: a REST-transport client constructs cleanly with anonymous credentials."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.DeleteNodeGroupRequest,
    dict,
])
def test_delete_unary_rest(request_type):
    """delete_unary over REST returns the raw compute.Operation (no LRO polling wrapper)."""
    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_delete_unary_rest_required_fields(request_type=compute.DeleteNodeGroupRequest):
    """Required fields (nodeGroup/project/zone) survive transcoding; only request_id may remain unset."""
    transport_class = transports.NodeGroupsRestTransport

    request_init = {}
    request_init["node_group"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["nodeGroup"] = 'node_group_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["zone"] = 'zone_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "nodeGroup" in jsonified_request
    assert jsonified_request["nodeGroup"] == 'node_group_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == 'zone_value'

    client = NodeGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_unary_rest_unset_required_fields():
+    """Verify which delete fields remain required after defaults are dropped."""
+    # Credentials must be an instance, not the class object.
+    transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_unary_rest_interceptors(null_interceptor):
+    transport = transports.NodeGroupsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(),
+    )
+    client = NodeGroupsClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NodeGroupsRestInterceptor, "post_delete") as post, \
+        mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_delete") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteNodeGroupRequest.pb(compute.DeleteNodeGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.DeleteNodeGroupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNodeGroupRequest):
+    client = NodeGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_unary(request)
+
+
+def test_delete_unary_rest_flattened():
+    client = NodeGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + ) + + +def test_delete_unary_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNodesNodeGroupRequest, + dict, +]) +def test_delete_nodes_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_delete_nodes_request_resource"] = {'nodes': ['nodes_value1', 'nodes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_nodes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_nodes_rest_required_fields(request_type=compute.DeleteNodesNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_nodes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_nodes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_nodes(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_nodes_rest_unset_required_fields():
+    """Verify which delete_nodes fields remain required after defaults are dropped."""
+    # Credentials must be an instance, not the class object.
+    transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_nodes._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupsDeleteNodesRequestResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_nodes_rest_interceptors(null_interceptor):
+    transport = transports.NodeGroupsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(),
+    )
+    client = NodeGroupsClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NodeGroupsRestInterceptor, "post_delete_nodes") as post, \
+        mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_delete_nodes") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteNodesNodeGroupRequest.pb(compute.DeleteNodesNodeGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+ + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNodesNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_nodes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_nodes_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNodesNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_delete_nodes_request_resource"] = {'nodes': ['nodes_value1', 'nodes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_nodes(request) + + +def test_delete_nodes_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_delete_nodes_request_resource=compute.NodeGroupsDeleteNodesRequest(nodes=['nodes_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_nodes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/deleteNodes" % client.transport._host, args[1]) + + +def test_delete_nodes_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_nodes( + compute.DeleteNodesNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_delete_nodes_request_resource=compute.NodeGroupsDeleteNodesRequest(nodes=['nodes_value']), + ) + + +def test_delete_nodes_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNodesNodeGroupRequest, + dict, +]) +def test_delete_nodes_unary_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_delete_nodes_request_resource"] = {'nodes': ['nodes_value1', 'nodes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_nodes_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_nodes_unary_rest_required_fields(request_type=compute.DeleteNodesNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_nodes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_nodes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_nodes_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_nodes_unary_rest_unset_required_fields():
+    """Verify which delete_nodes fields remain required after defaults are dropped."""
+    # Credentials must be an instance, not the class object.
+    transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_nodes._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupsDeleteNodesRequestResource", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_nodes_unary_rest_interceptors(null_interceptor):
+    transport = transports.NodeGroupsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(),
+    )
+    client = NodeGroupsClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NodeGroupsRestInterceptor, "post_delete_nodes") as post, \
+        mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_delete_nodes") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteNodesNodeGroupRequest.pb(compute.DeleteNodesNodeGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.DeleteNodesNodeGroupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.delete_nodes_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_nodes_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNodesNodeGroupRequest):
+    client = NodeGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}
+    request_init["node_groups_delete_nodes_request_resource"] = {'nodes': ['nodes_value1', 'nodes_value2']}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_nodes_unary(request) + + +def test_delete_nodes_unary_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_delete_nodes_request_resource=compute.NodeGroupsDeleteNodesRequest(nodes=['nodes_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_nodes_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/deleteNodes" % client.transport._host, args[1]) + + +def test_delete_nodes_unary_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_nodes_unary( + compute.DeleteNodesNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_delete_nodes_request_resource=compute.NodeGroupsDeleteNodesRequest(nodes=['nodes_value']), + ) + + +def test_delete_nodes_unary_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetNodeGroupRequest, + dict, +]) +def test_get_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeGroup( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + location_hint='location_hint_value', + maintenance_policy='maintenance_policy_value', + name='name_value', + node_template='node_template_value', + self_link='self_link_value', + size=443, + status='status_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.NodeGroup) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.location_hint == 'location_hint_value' + assert response.maintenance_policy == 'maintenance_policy_value' + assert response.name == 'name_value' + assert response.node_template == 'node_template_value' + assert response.self_link == 'self_link_value' + assert response.size == 443 + assert response.status == 'status_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + 
use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NodeGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.NodeGroup.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_rest_unset_required_fields():
+    """Verify which get fields remain required after defaults are dropped."""
+    # Credentials must be an instance, not the class object.
+    transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("nodeGroup", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_rest_interceptors(null_interceptor):
+    transport = transports.NodeGroupsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(),
+    )
+    client = NodeGroupsClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.NodeGroupsRestInterceptor, "post_get") as post, \
+        mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_get") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.GetNodeGroupRequest.pb(compute.GetNodeGroupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.NodeGroup.to_json(compute.NodeGroup())
+
+        request = compute.GetNodeGroupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.NodeGroup()
+
+        client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetNodeGroupRequest):
+    client = NodeGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get(request)
+
+
+def test_get_rest_flattened():
+    client = NodeGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ return_value = compute.NodeGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + ) + + +def test_get_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyNodeGroupRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyNodeGroupRequest.pb(compute.GetIamPolicyNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyNodeGroupRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNodeGroupRequest, + dict, +]) +def test_insert_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["node_group_resource"] = {'autoscaling_policy': {'max_nodes': 958, 'min_nodes': 956, 'mode': 'mode_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'location_hint': 'location_hint_value', 'maintenance_policy': 'maintenance_policy_value', 'maintenance_window': {'maintenance_duration': {'nanos': 543, 'seconds': 751}, 'start_time': 'start_time_value'}, 'name': 'name_value', 'node_template': 'node_template_value', 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'size': 443, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["initial_node_count"] = 0 + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "initialNodeCount" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are 
now present + assert "initialNodeCount" in jsonified_request + assert jsonified_request["initialNodeCount"] == request_init["initial_node_count"] + + jsonified_request["initialNodeCount"] = 1911 + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("initial_node_count", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "initialNodeCount" in jsonified_request + assert jsonified_request["initialNodeCount"] == 1911 + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ( + "initialNodeCount", + str(0), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("initialNodeCount", "requestId", )) & set(("initialNodeCount", "nodeGroupResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNodeGroupRequest.pb(compute.InsertNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + 
} + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["node_group_resource"] = {'autoscaling_policy': {'max_nodes': 958, 'min_nodes': 956, 'mode': 'mode_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'location_hint': 'location_hint_value', 'maintenance_policy': 'maintenance_policy_value', 'maintenance_window': {'maintenance_duration': {'nanos': 543, 'seconds': 751}, 'start_time': 'start_time_value'}, 'name': 'name_value', 'node_template': 'node_template_value', 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'size': 443, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + initial_node_count=1911, + node_group_resource=compute.NodeGroup(autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertNodeGroupRequest(), + project='project_value', + zone='zone_value', + initial_node_count=1911, + node_group_resource=compute.NodeGroup(autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958)), + ) + + +def test_insert_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNodeGroupRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["node_group_resource"] = {'autoscaling_policy': {'max_nodes': 958, 'min_nodes': 956, 'mode': 'mode_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'location_hint': 'location_hint_value', 'maintenance_policy': 'maintenance_policy_value', 'maintenance_window': {'maintenance_duration': {'nanos': 543, 'seconds': 751}, 'start_time': 'start_time_value'}, 'name': 'name_value', 'node_template': 'node_template_value', 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'size': 443, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["initial_node_count"] = 0 + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "initialNodeCount" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "initialNodeCount" in jsonified_request + assert jsonified_request["initialNodeCount"] == request_init["initial_node_count"] + + jsonified_request["initialNodeCount"] = 1911 + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("initial_node_count", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "initialNodeCount" in jsonified_request + assert jsonified_request["initialNodeCount"] == 1911 + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ( + "initialNodeCount", + str(0), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("initialNodeCount", "requestId", )) & set(("initialNodeCount", "nodeGroupResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNodeGroupRequest.pb(compute.InsertNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["node_group_resource"] = {'autoscaling_policy': {'max_nodes': 958, 'min_nodes': 956, 'mode': 'mode_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'location_hint': 'location_hint_value', 'maintenance_policy': 'maintenance_policy_value', 'maintenance_window': {'maintenance_duration': {'nanos': 543, 'seconds': 751}, 'start_time': 'start_time_value'}, 'name': 'name_value', 'node_template': 'node_template_value', 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'size': 443, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + initial_node_count=1911, + node_group_resource=compute.NodeGroup(autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertNodeGroupRequest(), + project='project_value', + zone='zone_value', + initial_node_count=1911, + node_group_resource=compute.NodeGroup(autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958)), + ) + + +def test_insert_unary_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListNodeGroupsRequest, + dict, +]) +def test_list_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeGroupList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListNodeGroupsRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NodeGroupList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NodeGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListNodeGroupsRequest.pb(compute.ListNodeGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.NodeGroupList.to_json(compute.NodeGroupList()) + + request = compute.ListNodeGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NodeGroupList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListNodeGroupsRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeGroupList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListNodeGroupsRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NodeGroupList( + items=[ + compute.NodeGroup(), + compute.NodeGroup(), + compute.NodeGroup(), + ], + next_page_token='abc', + ), + compute.NodeGroupList( + items=[], + next_page_token='def', + ), + compute.NodeGroupList( + items=[ + compute.NodeGroup(), + ], + next_page_token='ghi', + ), + compute.NodeGroupList( + items=[ + compute.NodeGroup(), + compute.NodeGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NodeGroupList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.NodeGroup) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListNodesNodeGroupsRequest, + dict, +]) +def test_list_nodes_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeGroupsListNodes( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeGroupsListNodes.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_nodes(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNodesPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_nodes_rest_required_fields(request_type=compute.ListNodesNodeGroupsRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_nodes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_nodes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing 
in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NodeGroupsListNodes() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NodeGroupsListNodes.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_nodes(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_nodes_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_nodes._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("nodeGroup", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_nodes_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_list_nodes") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_list_nodes") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListNodesNodeGroupsRequest.pb(compute.ListNodesNodeGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NodeGroupsListNodes.to_json(compute.NodeGroupsListNodes()) + + request = compute.ListNodesNodeGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NodeGroupsListNodes() + + client.list_nodes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_nodes_rest_bad_request(transport: str = 'rest', request_type=compute.ListNodesNodeGroupsRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_nodes(request) + + +def test_list_nodes_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeGroupsListNodes() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeGroupsListNodes.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_nodes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/listNodes" % client.transport._host, args[1]) + + +def test_list_nodes_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_nodes( + compute.ListNodesNodeGroupsRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + ) + + +def test_list_nodes_rest_pager(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NodeGroupsListNodes( + items=[ + compute.NodeGroupNode(), + compute.NodeGroupNode(), + compute.NodeGroupNode(), + ], + next_page_token='abc', + ), + compute.NodeGroupsListNodes( + items=[], + next_page_token='def', + ), + compute.NodeGroupsListNodes( + items=[ + compute.NodeGroupNode(), + ], + next_page_token='ghi', + ), + compute.NodeGroupsListNodes( + items=[ + compute.NodeGroupNode(), + compute.NodeGroupNode(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NodeGroupsListNodes.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + pager = client.list_nodes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.NodeGroupNode) + for i in results) + + pages = list(client.list_nodes(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchNodeGroupRequest, + dict, +]) +def test_patch_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_group_resource"] = {'autoscaling_policy': {'max_nodes': 958, 'min_nodes': 956, 'mode': 'mode_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 
'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'location_hint': 'location_hint_value', 'maintenance_policy': 'maintenance_policy_value', 'maintenance_window': {'maintenance_duration': {'nanos': 543, 'seconds': 751}, 'start_time': 'start_time_value'}, 'name': 'name_value', 'node_template': 'node_template_value', 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'size': 443, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = 
'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNodeGroupRequest.pb(compute.PatchNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_group_resource"] = {'autoscaling_policy': {'max_nodes': 958, 'min_nodes': 956, 'mode': 'mode_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'location_hint': 'location_hint_value', 'maintenance_policy': 'maintenance_policy_value', 'maintenance_window': {'maintenance_duration': {'nanos': 543, 'seconds': 751}, 'start_time': 'start_time_value'}, 'name': 'name_value', 'node_template': 'node_template_value', 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'size': 443, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_group_resource=compute.NodeGroup(autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_group_resource=compute.NodeGroup(autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958)), + ) + + +def test_patch_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchNodeGroupRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_group_resource"] = {'autoscaling_policy': {'max_nodes': 958, 'min_nodes': 956, 'mode': 'mode_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'location_hint': 'location_hint_value', 'maintenance_policy': 'maintenance_policy_value', 'maintenance_window': {'maintenance_duration': {'nanos': 543, 'seconds': 751}, 'start_time': 'start_time_value'}, 'name': 'name_value', 'node_template': 'node_template_value', 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'size': 443, 'status': 'status_value', 'zone': 'zone_value'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, 
"post_patch") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchNodeGroupRequest.pb(compute.PatchNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_group_resource"] = {'autoscaling_policy': {'max_nodes': 958, 'min_nodes': 956, 'mode': 'mode_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'location_hint': 'location_hint_value', 'maintenance_policy': 'maintenance_policy_value', 'maintenance_window': {'maintenance_duration': {'nanos': 543, 'seconds': 751}, 'start_time': 'start_time_value'}, 'name': 'name_value', 'node_template': 'node_template_value', 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'size': 443, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + 
+ # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_group_resource=compute.NodeGroup(autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_group_resource=compute.NodeGroup(autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958)), + ) + + +def test_patch_unary_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyNodeGroupRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 
'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "zone", "zoneSetPolicyRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NodeGroupsRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyNodeGroupRequest.pb(compute.SetIamPolicyNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 
'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyNodeGroupRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetNodeTemplateNodeGroupRequest, + dict, +]) +def test_set_node_template_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_set_node_template_request_resource"] = {'node_template': 'node_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_node_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_node_template_rest_required_fields(request_type=compute.SetNodeTemplateNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_node_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_node_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_node_template(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_node_template_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_node_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupsSetNodeTemplateRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_node_template_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_set_node_template") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_set_node_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetNodeTemplateNodeGroupRequest.pb(compute.SetNodeTemplateNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetNodeTemplateNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_node_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_node_template_rest_bad_request(transport: str = 'rest', request_type=compute.SetNodeTemplateNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_set_node_template_request_resource"] = {'node_template': 'node_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_node_template(request) + + +def test_set_node_template_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_set_node_template_request_resource=compute.NodeGroupsSetNodeTemplateRequest(node_template='node_template_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_node_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/setNodeTemplate" % client.transport._host, args[1]) + + +def test_set_node_template_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_node_template( + compute.SetNodeTemplateNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_set_node_template_request_resource=compute.NodeGroupsSetNodeTemplateRequest(node_template='node_template_value'), + ) + + +def test_set_node_template_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetNodeTemplateNodeGroupRequest, + dict, +]) +def test_set_node_template_unary_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_set_node_template_request_resource"] = {'node_template': 'node_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_node_template_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_node_template_unary_rest_required_fields(request_type=compute.SetNodeTemplateNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_node_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_node_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_node_template_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_node_template_unary_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.set_node_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupsSetNodeTemplateRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_node_template_unary_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_set_node_template") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_set_node_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetNodeTemplateNodeGroupRequest.pb(compute.SetNodeTemplateNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetNodeTemplateNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_node_template_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_node_template_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetNodeTemplateNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_set_node_template_request_resource"] = {'node_template': 'node_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_node_template_unary(request) + + +def test_set_node_template_unary_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_set_node_template_request_resource=compute.NodeGroupsSetNodeTemplateRequest(node_template='node_template_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_node_template_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/setNodeTemplate" % client.transport._host, args[1]) + + +def test_set_node_template_unary_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_node_template_unary( + compute.SetNodeTemplateNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_set_node_template_request_resource=compute.NodeGroupsSetNodeTemplateRequest(node_template='node_template_value'), + ) + + +def test_set_node_template_unary_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SimulateMaintenanceEventNodeGroupRequest, + dict, +]) +def test_simulate_maintenance_event_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_simulate_maintenance_event_request_resource"] = {'nodes': ['nodes_value1', 'nodes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.simulate_maintenance_event(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_simulate_maintenance_event_rest_required_fields(request_type=compute.SimulateMaintenanceEventNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).simulate_maintenance_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are 
now present + + jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).simulate_maintenance_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.simulate_maintenance_event(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_simulate_maintenance_event_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.simulate_maintenance_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupsSimulateMaintenanceEventRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_simulate_maintenance_event_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_simulate_maintenance_event") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_simulate_maintenance_event") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SimulateMaintenanceEventNodeGroupRequest.pb(compute.SimulateMaintenanceEventNodeGroupRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SimulateMaintenanceEventNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.simulate_maintenance_event(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_simulate_maintenance_event_rest_bad_request(transport: str = 'rest', request_type=compute.SimulateMaintenanceEventNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_simulate_maintenance_event_request_resource"] = {'nodes': ['nodes_value1', 'nodes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.simulate_maintenance_event(request) + + +def test_simulate_maintenance_event_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_simulate_maintenance_event_request_resource=compute.NodeGroupsSimulateMaintenanceEventRequest(nodes=['nodes_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.simulate_maintenance_event(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/simulateMaintenanceEvent" % client.transport._host, args[1]) + + +def test_simulate_maintenance_event_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.simulate_maintenance_event( + compute.SimulateMaintenanceEventNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_simulate_maintenance_event_request_resource=compute.NodeGroupsSimulateMaintenanceEventRequest(nodes=['nodes_value']), + ) + + +def test_simulate_maintenance_event_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SimulateMaintenanceEventNodeGroupRequest, + dict, +]) +def test_simulate_maintenance_event_unary_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_simulate_maintenance_event_request_resource"] = {'nodes': ['nodes_value1', 'nodes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.simulate_maintenance_event_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_simulate_maintenance_event_unary_rest_required_fields(request_type=compute.SimulateMaintenanceEventNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).simulate_maintenance_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = 'node_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).simulate_maintenance_event._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == 'node_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.simulate_maintenance_event_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_simulate_maintenance_event_unary_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.simulate_maintenance_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeGroup", "nodeGroupsSimulateMaintenanceEventRequestResource", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_simulate_maintenance_event_unary_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = 
NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_simulate_maintenance_event") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_simulate_maintenance_event") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SimulateMaintenanceEventNodeGroupRequest.pb(compute.SimulateMaintenanceEventNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SimulateMaintenanceEventNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.simulate_maintenance_event_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_simulate_maintenance_event_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SimulateMaintenanceEventNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + request_init["node_groups_simulate_maintenance_event_request_resource"] = {'nodes': ['nodes_value1', 'nodes_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.simulate_maintenance_event_unary(request) + + +def test_simulate_maintenance_event_unary_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_simulate_maintenance_event_request_resource=compute.NodeGroupsSimulateMaintenanceEventRequest(nodes=['nodes_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.simulate_maintenance_event_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/simulateMaintenanceEvent" % client.transport._host, args[1]) + + +def test_simulate_maintenance_event_unary_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.simulate_maintenance_event_unary( + compute.SimulateMaintenanceEventNodeGroupRequest(), + project='project_value', + zone='zone_value', + node_group='node_group_value', + node_groups_simulate_maintenance_event_request_resource=compute.NodeGroupsSimulateMaintenanceEventRequest(nodes=['nodes_value']), + ) + + +def test_simulate_maintenance_event_unary_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsNodeGroupRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsNodeGroupRequest): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + 
assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", "zone", ))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.NodeGroupsRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsNodeGroupRequest.pb(compute.TestIamPermissionsNodeGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsNodeGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsNodeGroupRequest): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + 
request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsNodeGroupRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeGroupsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NodeGroupsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NodeGroupsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeGroupsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NodeGroupsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.NodeGroupsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_transport_kind(transport_name):
+    transport = NodeGroupsClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_node_groups_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.NodeGroupsTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_node_groups_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.compute_v1.services.node_groups.transports.NodeGroupsTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.NodeGroupsTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'add_nodes',
+        'aggregated_list',
+        'delete',
+        'delete_nodes',
+        'get',
+        'get_iam_policy',
+        'insert',
+        'list',
+        'list_nodes',
+        'patch',
+        'set_iam_policy',
+        'set_node_template',
+        'simulate_maintenance_event',
+        'test_iam_permissions',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_node_groups_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.node_groups.transports.NodeGroupsTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.NodeGroupsTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/cloud-platform',
+),
+            quota_project_id="octopus",
+        )
+
+
+def test_node_groups_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.node_groups.transports.NodeGroupsTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.NodeGroupsTransport()
+        adc.assert_called_once()
+
+
+def test_node_groups_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        NodeGroupsClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/cloud-platform',
+),
+            quota_project_id=None,
+        )
+
+
+def test_node_groups_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.NodeGroupsRestTransport (
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_node_groups_host_no_port(transport_name):
+    client = NodeGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'compute.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://compute.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_node_groups_host_with_port(transport_name):
+    client = NodeGroupsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_node_groups_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = NodeGroupsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = NodeGroupsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_nodes._session + session2 = client2.transport.add_nodes._session + assert session1 != session2 + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.delete_nodes._session + session2 = client2.transport.delete_nodes._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_nodes._session + session2 = client2.transport.list_nodes._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = 
client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_node_template._session + session2 = client2.transport.set_node_template._session + assert session1 != session2 + session1 = client1.transport.simulate_maintenance_event._session + session2 = client2.transport.simulate_maintenance_event._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = NodeGroupsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = NodeGroupsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = NodeGroupsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = NodeGroupsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = NodeGroupsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+    actual = NodeGroupsClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "oyster"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = NodeGroupsClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = NodeGroupsClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeGroupsClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "cuttlefish"
+    expected = "projects/{project}".format(project=project, )
+    actual = NodeGroupsClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = NodeGroupsClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeGroupsClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = NodeGroupsClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = NodeGroupsClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeGroupsClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.NodeGroupsTransport, '_prep_wrapped_messages') as prep:
+        client = NodeGroupsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.NodeGroupsTransport, '_prep_wrapped_messages') as prep:
+        transport_class = NodeGroupsClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+    }
+
+    for transport, close_name in transports.items():
+        client = NodeGroupsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+def test_client_ctx():
+    transports = [
+        'rest',
+    ]
+    for transport in transports:
+        client = NodeGroupsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (NodeGroupsClient, transports.NodeGroupsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_templates.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_templates.py new file mode 100644 index 000000000..20c95c7e3 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_templates.py @@ -0,0 +1,3590 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.node_templates import NodeTemplatesClient +from google.cloud.compute_v1.services.node_templates import pagers +from google.cloud.compute_v1.services.node_templates import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert NodeTemplatesClient._get_default_mtls_endpoint(None) is None
+    assert NodeTemplatesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert NodeTemplatesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert NodeTemplatesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert NodeTemplatesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert NodeTemplatesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (NodeTemplatesClient, "rest"),
+])
+def test_node_templates_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'compute.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://compute.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.NodeTemplatesRestTransport, "rest"),
+])
+def test_node_templates_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials,
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NodeTemplatesClient, "rest"), +]) +def test_node_templates_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_node_templates_client_get_transport_class(): + transport = NodeTemplatesClient.get_transport_class() + available_transports = [ + transports.NodeTemplatesRestTransport, + ] + assert transport in available_transports + + transport = NodeTemplatesClient.get_transport_class("rest") + assert transport == transports.NodeTemplatesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NodeTemplatesClient, transports.NodeTemplatesRestTransport, "rest"), +]) +@mock.patch.object(NodeTemplatesClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(NodeTemplatesClient)) +def test_node_templates_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NodeTemplatesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NodeTemplatesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NodeTemplatesClient, transports.NodeTemplatesRestTransport, "rest", "true"), + (NodeTemplatesClient, transports.NodeTemplatesRestTransport, "rest", "false"), +]) +@mock.patch.object(NodeTemplatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeTemplatesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_node_templates_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + NodeTemplatesClient +]) +@mock.patch.object(NodeTemplatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeTemplatesClient)) +def test_node_templates_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NodeTemplatesClient, transports.NodeTemplatesRestTransport, "rest"), +]) +def test_node_templates_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (NodeTemplatesClient, transports.NodeTemplatesRestTransport, "rest", None), +]) +def test_node_templates_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListNodeTemplatesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NodeTemplateAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListNodeTemplatesRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NodeTemplateAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NodeTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListNodeTemplatesRequest.pb(compute.AggregatedListNodeTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NodeTemplateAggregatedList.to_json(compute.NodeTemplateAggregatedList()) + + request = compute.AggregatedListNodeTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NodeTemplateAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListNodeTemplatesRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeTemplateAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeTemplateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/nodeTemplates" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListNodeTemplatesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NodeTemplateAggregatedList( + items={ + 'a':compute.NodeTemplatesScopedList(), + 'b':compute.NodeTemplatesScopedList(), + 'c':compute.NodeTemplatesScopedList(), + }, + next_page_token='abc', + ), + compute.NodeTemplateAggregatedList( + items={}, + next_page_token='def', + ), + compute.NodeTemplateAggregatedList( + items={ + 'g':compute.NodeTemplatesScopedList(), + }, + next_page_token='ghi', + ), + compute.NodeTemplateAggregatedList( + items={ + 'h':compute.NodeTemplatesScopedList(), + 'i':compute.NodeTemplatesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NodeTemplateAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.NodeTemplatesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.NodeTemplatesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.NodeTemplatesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNodeTemplateRequest, + dict, +]) +def test_delete_rest(request_type): + client = 
NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteNodeTemplateRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["node_template"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeTemplate"] 
= 'node_template_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeTemplate" in jsonified_request + assert jsonified_request["nodeTemplate"] == 'node_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeTemplate", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNodeTemplateRequest.pb(compute.DeleteNodeTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNodeTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNodeTemplateRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + node_template='node_template_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteNodeTemplateRequest(), + project='project_value', + region='region_value', + node_template='node_template_value', + ) + + +def test_delete_rest_error(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteNodeTemplateRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteNodeTemplateRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["node_template"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeTemplate"] = 'node_template_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeTemplate" in jsonified_request + assert jsonified_request["nodeTemplate"] == 'node_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeTemplate", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteNodeTemplateRequest.pb(compute.DeleteNodeTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteNodeTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteNodeTemplateRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + node_template='node_template_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteNodeTemplateRequest(), + project='project_value', + region='region_value', + node_template='node_template_value', + ) + + +def test_delete_unary_rest_error(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetNodeTemplateRequest, + dict, +]) +def test_get_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NodeTemplate( + cpu_overcommit_type='cpu_overcommit_type_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + node_type='node_type_value', + region='region_value', + self_link='self_link_value', + status='status_value', + status_message='status_message_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.NodeTemplate) + assert response.cpu_overcommit_type == 'cpu_overcommit_type_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.node_type == 'node_type_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + assert response.status_message == 'status_message_value' + + +def test_get_rest_required_fields(request_type=compute.GetNodeTemplateRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["node_template"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeTemplate"] = 'node_template_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeTemplate" in jsonified_request + assert jsonified_request["nodeTemplate"] == 'node_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert 
"region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NodeTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NodeTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("nodeTemplate", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None 
if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetNodeTemplateRequest.pb(compute.GetNodeTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NodeTemplate.to_json(compute.NodeTemplate()) + + request = compute.GetNodeTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NodeTemplate() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetNodeTemplateRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NodeTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'node_template': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + node_template='node_template_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetNodeTemplateRequest(), + project='project_value', + region='region_value', + node_template='node_template_value', + ) + + +def test_get_rest_error(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyNodeTemplateRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyNodeTemplateRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "region", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyNodeTemplateRequest.pb(compute.GetIamPolicyNodeTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyNodeTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyNodeTemplateRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyNodeTemplateRequest(), + project='project_value', + region='region_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNodeTemplateRequest, + dict, +]) +def test_insert_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["node_template_resource"] = {'accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'cpu_overcommit_type': 'cpu_overcommit_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disks': [{'disk_count': 1075, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'node_affinity_labels': {}, 'node_type': 'node_type_value', 'node_type_flexibility': {'cpus': 'cpus_value', 'local_ssd': 'local_ssd_value', 'memory': 'memory_value'}, 'region': 'region_value', 'self_link': 'self_link_value', 'server_binding': {'type_': 'type__value'}, 'status': 'status_value', 'status_message': 'status_message_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertNodeTemplateRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeTemplateResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNodeTemplateRequest.pb(compute.InsertNodeTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNodeTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNodeTemplateRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["node_template_resource"] = {'accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'cpu_overcommit_type': 'cpu_overcommit_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disks': [{'disk_count': 1075, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'node_affinity_labels': {}, 'node_type': 'node_type_value', 'node_type_flexibility': {'cpus': 'cpus_value', 'local_ssd': 'local_ssd_value', 'memory': 'memory_value'}, 'region': 'region_value', 'self_link': 'self_link_value', 'server_binding': {'type_': 'type__value'}, 'status': 'status_value', 'status_message': 'status_message_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + node_template_resource=compute.NodeTemplate(accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertNodeTemplateRequest(), + project='project_value', + region='region_value', + node_template_resource=compute.NodeTemplate(accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]), + ) + + +def test_insert_rest_error(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertNodeTemplateRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["node_template_resource"] = {'accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'cpu_overcommit_type': 'cpu_overcommit_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disks': [{'disk_count': 1075, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'node_affinity_labels': {}, 'node_type': 'node_type_value', 'node_type_flexibility': {'cpus': 'cpus_value', 'local_ssd': 'local_ssd_value', 'memory': 'memory_value'}, 'region': 'region_value', 'self_link': 'self_link_value', 'server_binding': {'type_': 'type__value'}, 'status': 'status_value', 'status_message': 'status_message_value'} + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertNodeTemplateRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("nodeTemplateResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertNodeTemplateRequest.pb(compute.InsertNodeTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertNodeTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertNodeTemplateRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["node_template_resource"] = {'accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'cpu_overcommit_type': 'cpu_overcommit_type_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disks': [{'disk_count': 1075, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'node_affinity_labels': {}, 'node_type': 'node_type_value', 'node_type_flexibility': {'cpus': 'cpus_value', 'local_ssd': 'local_ssd_value', 'memory': 'memory_value'}, 'region': 'region_value', 'self_link': 'self_link_value', 'server_binding': {'type_': 'type__value'}, 
'status': 'status_value', 'status_message': 'status_message_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + node_template_resource=compute.NodeTemplate(accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertNodeTemplateRequest(), + project='project_value', + region='region_value', + node_template_resource=compute.NodeTemplate(accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]), + ) + + +def test_insert_unary_rest_error(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListNodeTemplatesRequest, + dict, +]) +def test_list_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeTemplateList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListNodeTemplatesRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NodeTemplateList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NodeTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListNodeTemplatesRequest.pb(compute.ListNodeTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NodeTemplateList.to_json(compute.NodeTemplateList()) + + request = compute.ListNodeTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NodeTemplateList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListNodeTemplatesRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeTemplateList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListNodeTemplatesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NodeTemplateList( + items=[ + compute.NodeTemplate(), + compute.NodeTemplate(), + compute.NodeTemplate(), + ], + next_page_token='abc', + ), + compute.NodeTemplateList( + items=[], + next_page_token='def', + ), + compute.NodeTemplateList( + items=[ + compute.NodeTemplate(), + ], + next_page_token='ghi', + ), + compute.NodeTemplateList( + items=[ + compute.NodeTemplate(), + compute.NodeTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NodeTemplateList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.NodeTemplate) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyNodeTemplateRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 
'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyNodeTemplateRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "regionSetPolicyRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyNodeTemplateRequest.pb(compute.SetIamPolicyNodeTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyNodeTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyNodeTemplateRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 
'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyNodeTemplateRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsNodeTemplateRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsNodeTemplateRequest): + transport_class = transports.NodeTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.NodeTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTemplatesRestInterceptor(), + ) + client = NodeTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.NodeTemplatesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsNodeTemplateRequest.pb(compute.TestIamPermissionsNodeTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsNodeTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsNodeTemplateRequest): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsNodeTemplateRequest(), + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeTemplatesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NodeTemplatesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NodeTemplatesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeTemplatesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NodeTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NodeTemplatesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.NodeTemplatesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = NodeTemplatesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_node_templates_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.NodeTemplatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_node_templates_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.node_templates.transports.NodeTemplatesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.NodeTemplatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_node_templates_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.node_templates.transports.NodeTemplatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NodeTemplatesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_node_templates_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.node_templates.transports.NodeTemplatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NodeTemplatesTransport() + adc.assert_called_once() + + +def test_node_templates_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NodeTemplatesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_node_templates_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.NodeTemplatesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_node_templates_host_no_port(transport_name): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_node_templates_host_with_port(transport_name): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_node_templates_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = NodeTemplatesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = NodeTemplatesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = NodeTemplatesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = NodeTemplatesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NodeTemplatesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = NodeTemplatesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = NodeTemplatesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NodeTemplatesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = NodeTemplatesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = NodeTemplatesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = NodeTemplatesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = NodeTemplatesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = NodeTemplatesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NodeTemplatesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = NodeTemplatesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = NodeTemplatesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = NodeTemplatesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.NodeTemplatesTransport, '_prep_wrapped_messages') as prep: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.NodeTemplatesTransport, '_prep_wrapped_messages') as prep: + transport_class = NodeTemplatesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying 
transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (NodeTemplatesClient, transports.NodeTemplatesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_types.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_types.py new file mode 100644 index 000000000..178ebea98 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_node_types.py @@ -0,0 +1,1712 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import os

# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
# NOTE(review): the generated file imported google.protobuf.json_format a
# second time here; the duplicate import was removed (imports are idempotent,
# so this is purely a cleanup with no behavior change).

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.node_types import NodeTypesClient
from google.cloud.compute_v1.services.node_types import pagers
from google.cloud.compute_v1.services.node_types import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    """Return a dummy ``(cert_bytes, key_bytes)`` pair.

    Used throughout these tests as the ``client_cert_source`` callback so
    the mTLS code paths can be exercised without a real certificate.
    """
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NodeTypesClient._get_default_mtls_endpoint(None) is None + assert NodeTypesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert NodeTypesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert NodeTypesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert NodeTypesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert NodeTypesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NodeTypesClient, "rest"), +]) +def test_node_types_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.NodeTypesRestTransport, "rest"), +]) +def test_node_types_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + 
creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (NodeTypesClient, "rest"), +]) +def test_node_types_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_node_types_client_get_transport_class(): + transport = NodeTypesClient.get_transport_class() + available_transports = [ + transports.NodeTypesRestTransport, + ] + assert transport in available_transports + + transport = NodeTypesClient.get_transport_class("rest") + assert transport == transports.NodeTypesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NodeTypesClient, transports.NodeTypesRestTransport, "rest"), +]) +@mock.patch.object(NodeTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeTypesClient)) +def test_node_types_client_client_options(client_class, 
transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NodeTypesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NodeTypesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (NodeTypesClient, transports.NodeTypesRestTransport, "rest", "true"), + (NodeTypesClient, transports.NodeTypesRestTransport, "rest", "false"), +]) +@mock.patch.object(NodeTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeTypesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_node_types_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + NodeTypesClient +]) +@mock.patch.object(NodeTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeTypesClient)) +def test_node_types_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (NodeTypesClient, transports.NodeTypesRestTransport, "rest"), +]) +def test_node_types_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (NodeTypesClient, transports.NodeTypesRestTransport, "rest", None), +]) +def test_node_types_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListNodeTypesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.NodeTypeAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListNodeTypesRequest): + transport_class = transports.NodeTypesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NodeTypeAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NodeTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.NodeTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.NodeTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.NodeTypesRestInterceptor(), + ) + client = NodeTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.NodeTypesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.NodeTypesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListNodeTypesRequest.pb(compute.AggregatedListNodeTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NodeTypeAggregatedList.to_json(compute.NodeTypeAggregatedList()) + + request = compute.AggregatedListNodeTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NodeTypeAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListNodeTypesRequest): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NodeTypeAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NodeTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/nodeTypes" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListNodeTypesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NodeTypeAggregatedList( + items={ + 'a':compute.NodeTypesScopedList(), + 'b':compute.NodeTypesScopedList(), + 'c':compute.NodeTypesScopedList(), + }, + next_page_token='abc', + ), + compute.NodeTypeAggregatedList( + items={}, + next_page_token='def', + ), + compute.NodeTypeAggregatedList( + items={ + 'g':compute.NodeTypesScopedList(), + }, + next_page_token='ghi', + ), + compute.NodeTypeAggregatedList( + items={ + 'h':compute.NodeTypesScopedList(), + 'i':compute.NodeTypesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NodeTypeAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.NodeTypesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.NodeTypesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.NodeTypesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.GetNodeTypeRequest, + dict, +]) +def test_get_rest(request_type): + client = NodeTypesClient( + 
credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_type': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.NodeType(
+            cpu_platform='cpu_platform_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            guest_cpus=1090,
+            id=205,
+            kind='kind_value',
+            local_ssd_gb=1244,
+            memory_mb=967,
+            name='name_value',
+            self_link='self_link_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.NodeType.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.get(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, compute.NodeType)
+    assert response.cpu_platform == 'cpu_platform_value'
+    assert response.creation_timestamp == 'creation_timestamp_value'
+    assert response.description == 'description_value'
+    assert response.guest_cpus == 1090
+    assert response.id == 205
+    assert response.kind == 'kind_value'
+    assert response.local_ssd_gb == 1244
+    assert response.memory_mb == 967
+    assert response.name == 'name_value'
+    assert response.self_link == 'self_link_value'
+    assert response.zone == 'zone_value'
+
+
+def test_get_rest_required_fields(request_type=compute.GetNodeTypeRequest):
+    transport_class = transports.NodeTypesRestTransport
+
+    request_init = {}
+    request_init["node_type"] = ""
+    request_init["project"] = ""
+    request_init["zone"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["nodeType"] = 'node_type_value'
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["zone"] = 'zone_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "nodeType" in jsonified_request
+    assert jsonified_request["nodeType"] == 'node_type_value'
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "zone" in jsonified_request
+    assert jsonified_request["zone"] == 'zone_value'
+
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.NodeType()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.NodeType.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_rest_unset_required_fields():
+    transport = transports.NodeTypesRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.get._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("nodeType", "project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_rest_interceptors(null_interceptor):
+    transport = transports.NodeTypesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NodeTypesRestInterceptor(),
+        )
+    client = NodeTypesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.NodeTypesRestInterceptor, "post_get") as post, \
+         mock.patch.object(transports.NodeTypesRestInterceptor, "pre_get") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.GetNodeTypeRequest.pb(compute.GetNodeTypeRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.NodeType.to_json(compute.NodeType())
+
+        request = compute.GetNodeTypeRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.NodeType()
+
+        client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetNodeTypeRequest):
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2', 'node_type': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get(request)
+
+
+def test_get_rest_flattened():
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.NodeType()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'zone': 'sample2', 'node_type': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            zone='zone_value',
+            node_type='node_type_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.NodeType.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.get(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeTypes/{node_type}" % client.transport._host, args[1])
+
+
+def test_get_rest_flattened_error(transport: str = 'rest'):
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get(
+            compute.GetNodeTypeRequest(),
+            project='project_value',
+            zone='zone_value',
+            node_type='node_type_value',
+        )
+
+
+def test_get_rest_error():
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.ListNodeTypesRequest,
+    dict,
+])
+def test_list_rest(request_type):
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.NodeTypeList(
+            id='id_value',
+            kind='kind_value',
+            next_page_token='next_page_token_value',
+            self_link='self_link_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.NodeTypeList.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.list(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListPager)
+    assert response.id == 'id_value'
+    assert response.kind == 'kind_value'
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.self_link == 'self_link_value'
+
+
+def test_list_rest_required_fields(request_type=compute.ListNodeTypesRequest):
+    transport_class = transports.NodeTypesRestTransport
+
+    request_init = {}
+    request_init["project"] = ""
+    request_init["zone"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["zone"] = 'zone_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "zone" in jsonified_request
+    assert jsonified_request["zone"] == 'zone_value'
+
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.NodeTypeList()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.NodeTypeList.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_rest_unset_required_fields():
+    transport = transports.NodeTypesRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_rest_interceptors(null_interceptor):
+    transport = transports.NodeTypesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.NodeTypesRestInterceptor(),
+        )
+    client = NodeTypesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.NodeTypesRestInterceptor, "post_list") as post, \
+         mock.patch.object(transports.NodeTypesRestInterceptor, "pre_list") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.ListNodeTypesRequest.pb(compute.ListNodeTypesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.NodeTypeList.to_json(compute.NodeTypeList())
+
+        request = compute.ListNodeTypesRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.NodeTypeList()
+
+        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListNodeTypesRequest):
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'zone': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list(request)
+
+
+def test_list_rest_flattened():
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.NodeTypeList()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'zone': 'sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            zone='zone_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.NodeTypeList.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.list(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/nodeTypes" % client.transport._host, args[1])
+
+
+def test_list_rest_flattened_error(transport: str = 'rest'):
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list(
+            compute.ListNodeTypesRequest(),
+            project='project_value',
+            zone='zone_value',
+        )
+
+
+def test_list_rest_pager(transport: str = 'rest'):
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+        #with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            compute.NodeTypeList(
+                items=[
+                    compute.NodeType(),
+                    compute.NodeType(),
+                    compute.NodeType(),
+                ],
+                next_page_token='abc',
+            ),
+            compute.NodeTypeList(
+                items=[],
+                next_page_token='def',
+            ),
+            compute.NodeTypeList(
+                items=[
+                    compute.NodeType(),
+                ],
+                next_page_token='ghi',
+            ),
+            compute.NodeTypeList(
+                items=[
+                    compute.NodeType(),
+                    compute.NodeType(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(compute.NodeTypeList.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'project': 'sample1', 'zone': 'sample2'}
+
+        pager = client.list(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, compute.NodeType)
+                   for i in results)
+
+        pages = list(client.list(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.NodeTypesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = NodeTypesClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.NodeTypesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = NodeTypesClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.NodeTypesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = NodeTypesClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+    options = mock.Mock()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = NodeTypesClient(
+            client_options=options,
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.NodeTypesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = NodeTypesClient(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.NodeTypesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = NodeTypesClient(transport=transport)
+    assert client.transport is transport
+
+
+@pytest.mark.parametrize("transport_class", [
+    transports.NodeTypesRestTransport,
+])
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_transport_kind(transport_name):
+    transport = NodeTypesClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_node_types_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.NodeTypesTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_node_types_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.compute_v1.services.node_types.transports.NodeTypesTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.NodeTypesTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'aggregated_list',
+        'get',
+        'list',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_node_types_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.node_types.transports.NodeTypesTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.NodeTypesTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+            'https://www.googleapis.com/auth/compute.readonly',
+            'https://www.googleapis.com/auth/compute',
+            'https://www.googleapis.com/auth/cloud-platform',
+),
+            quota_project_id="octopus",
+        )
+
+
+def test_node_types_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.node_types.transports.NodeTypesTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.NodeTypesTransport()
+        adc.assert_called_once()
+
+
+def test_node_types_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        NodeTypesClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+            'https://www.googleapis.com/auth/compute.readonly',
+            'https://www.googleapis.com/auth/compute',
+            'https://www.googleapis.com/auth/cloud-platform',
+),
+            quota_project_id=None,
+        )
+
+
+def test_node_types_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.NodeTypesRestTransport (
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_node_types_host_no_port(transport_name):
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'compute.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://compute.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_node_types_host_with_port(transport_name):
+    client = NodeTypesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'compute.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://compute.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_node_types_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = NodeTypesClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = NodeTypesClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.aggregated_list._session
+    session2 = client2.transport.aggregated_list._session
+    assert session1 != session2
+    session1 = client1.transport.get._session
+    session2 = client2.transport.get._session
+    assert session1 != session2
+    session1 = client1.transport.list._session
+    session2 = client2.transport.list._session
+    assert session1 != session2
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = NodeTypesClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = NodeTypesClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeTypesClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = NodeTypesClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = NodeTypesClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeTypesClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "oyster"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = NodeTypesClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = NodeTypesClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeTypesClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "cuttlefish"
+    expected = "projects/{project}".format(project=project, )
+    actual = NodeTypesClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = NodeTypesClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeTypesClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = NodeTypesClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = NodeTypesClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeTypesClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.NodeTypesTransport, '_prep_wrapped_messages') as prep:
+        client = NodeTypesClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.NodeTypesTransport, '_prep_wrapped_messages') as prep:
+        transport_class = NodeTypesClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+    }
+
+    for transport, close_name in transports.items():
+        client = NodeTypesClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+def test_client_ctx():
+    transports = [
+        'rest',
+    ]
+    for transport in transports:
+        client = NodeTypesClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+@pytest.mark.parametrize("client_class,transport_class", [
+    (NodeTypesClient, transports.NodeTypesRestTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_packet_mirrorings.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_packet_mirrorings.py
new file mode 100644
index 000000000..f4eee68a8
--- /dev/null
+++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_packet_mirrorings.py
@@ -0,0 +1,3643 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import extended_operation # type: ignore
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.compute_v1.services.packet_mirrorings import PacketMirroringsClient
+from google.cloud.compute_v1.services.packet_mirrorings import pagers
+from google.cloud.compute_v1.services.packet_mirrorings import transports
+from google.cloud.compute_v1.types import compute
+from google.oauth2 import service_account
+import google.auth
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert PacketMirroringsClient._get_default_mtls_endpoint(None) is None
+    assert PacketMirroringsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert PacketMirroringsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert PacketMirroringsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert PacketMirroringsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert PacketMirroringsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (PacketMirroringsClient, "rest"),
+])
+def test_packet_mirrorings_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'compute.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://compute.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.PacketMirroringsRestTransport, "rest"),
+])
+def test_packet_mirrorings_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (PacketMirroringsClient, "rest"),
+])
+def test_packet_mirrorings_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'compute.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://compute.googleapis.com'
+        )
+
+
+def test_packet_mirrorings_client_get_transport_class():
+    transport = PacketMirroringsClient.get_transport_class()
+    available_transports = [
+        transports.PacketMirroringsRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = PacketMirroringsClient.get_transport_class("rest")
+    assert transport == transports.PacketMirroringsRestTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (PacketMirroringsClient, transports.PacketMirroringsRestTransport, "rest"),
+])
@mock.patch.object(PacketMirroringsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PacketMirroringsClient))
def test_packet_mirrorings_client_client_options(client_class, transport_class, transport_name):
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(PacketMirroringsClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(PacketMirroringsClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_audience is provided
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (PacketMirroringsClient, transports.PacketMirroringsRestTransport, "rest", "true"),
    (PacketMirroringsClient, transports.PacketMirroringsRestTransport, "rest", "false"),
])
@mock.patch.object(PacketMirroringsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PacketMirroringsClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_packet_mirrorings_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )


@pytest.mark.parametrize("client_class", [
    PacketMirroringsClient
])
@mock.patch.object(PacketMirroringsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PacketMirroringsClient))
def test_packet_mirrorings_client_get_mtls_endpoint_and_cert_source(client_class):
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (PacketMirroringsClient, transports.PacketMirroringsRestTransport, "rest"),
])
def test_packet_mirrorings_client_client_options_scopes(client_class, transport_class, transport_name):
    # Check the case scopes are provided.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (PacketMirroringsClient, transports.PacketMirroringsRestTransport, "rest", None),
])
def test_packet_mirrorings_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )


@pytest.mark.parametrize("request_type", [
    compute.AggregatedListPacketMirroringsRequest,
    dict,
])
def test_aggregated_list_rest(request_type):
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.PacketMirroringAggregatedList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
            unreachables=['unreachables_value'],
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.PacketMirroringAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.aggregated_list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AggregatedListPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'
    assert response.unreachables == ['unreachables_value']


def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListPacketMirroringsRequest):
    transport_class = transports.PacketMirroringsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.PacketMirroringAggregatedList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.PacketMirroringAggregatedList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.aggregated_list(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_aggregated_list_rest_unset_required_fields():
    # Verify the set of query params the transport considers optional vs. required.
    # Fix: pass a credentials *instance*; the original passed the
    # AnonymousCredentials class object itself (missing call parentheses),
    # unlike every other transport construction in this file.
    transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.aggregated_list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_aggregated_list_rest_interceptors(null_interceptor):
    # The pre/post interceptor hooks must each fire exactly once per call.
    transport = transports.PacketMirroringsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(),
        )
    client = PacketMirroringsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_aggregated_list") as post, \
         mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_aggregated_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AggregatedListPacketMirroringsRequest.pb(compute.AggregatedListPacketMirroringsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.PacketMirroringAggregatedList.to_json(compute.PacketMirroringAggregatedList())

        request = compute.AggregatedListPacketMirroringsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.PacketMirroringAggregatedList()

        client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListPacketMirroringsRequest):
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.aggregated_list(request)


def test_aggregated_list_rest_flattened():
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.PacketMirroringAggregatedList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.PacketMirroringAggregatedList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.aggregated_list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/packetMirrorings" % client.transport._host, args[1])


def test_aggregated_list_rest_flattened_error(transport: str = 'rest'):
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.aggregated_list(
            compute.AggregatedListPacketMirroringsRequest(),
            project='project_value',
        )


def test_aggregated_list_rest_pager(transport: str = 'rest'):
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.PacketMirroringAggregatedList(
                items={
                    'a':compute.PacketMirroringsScopedList(),
                    'b':compute.PacketMirroringsScopedList(),
                    'c':compute.PacketMirroringsScopedList(),
                },
                next_page_token='abc',
            ),
            compute.PacketMirroringAggregatedList(
                items={},
                next_page_token='def',
            ),
            compute.PacketMirroringAggregatedList(
                items={
                    'g':compute.PacketMirroringsScopedList(),
                },
                next_page_token='ghi',
            ),
            compute.PacketMirroringAggregatedList(
                items={
                    'h':compute.PacketMirroringsScopedList(),
                    'i':compute.PacketMirroringsScopedList(),
                },
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.PacketMirroringAggregatedList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1'}

        pager = client.aggregated_list(request=sample_request)

        assert isinstance(pager.get('a'), compute.PacketMirroringsScopedList)
        assert pager.get('h') is None

        results = list(pager)
        assert len(results) == 6
        assert all(
            isinstance(i, tuple)
                for i in results)
        for result in results:
            assert isinstance(result, tuple)
            assert tuple(type(t) for t in result) == (str, compute.PacketMirroringsScopedList)

        assert pager.get('a') is None
        assert isinstance(pager.get('h'), compute.PacketMirroringsScopedList)

        pages = list(client.aggregated_list(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [
    compute.DeletePacketMirroringRequest,
    dict,
])
def test_delete_rest(request_type):
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_delete_rest_required_fields(request_type=compute.DeletePacketMirroringRequest):
    transport_class = transports.PacketMirroringsRestTransport

    request_init = {}
    request_init["packet_mirroring"] = ""
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["packetMirroring"] = 'packet_mirroring_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "packetMirroring" in jsonified_request
    assert jsonified_request["packetMirroring"] == 'packet_mirroring_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_delete_rest_unset_required_fields():
    # Verify the set of query params the transport considers optional vs. required.
    # Fix: pass a credentials *instance*; the original passed the
    # AnonymousCredentials class object itself (missing call parentheses).
    transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.delete._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("packetMirroring", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_rest_interceptors(null_interceptor):
    # The pre/post interceptor hooks must each fire exactly once per call.
    transport = transports.PacketMirroringsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(),
        )
    client = PacketMirroringsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_delete") as post, \
         mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_delete") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.DeletePacketMirroringRequest.pb(compute.DeletePacketMirroringRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.DeletePacketMirroringRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePacketMirroringRequest):
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete(request)


def test_delete_rest_flattened():
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            packet_mirroring='packet_mirroring_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.delete(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}" % client.transport._host, args[1])


def test_delete_rest_flattened_error(transport: str = 'rest'):
    client = PacketMirroringsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.delete( + compute.DeletePacketMirroringRequest(), + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + ) + + +def test_delete_rest_error(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePacketMirroringRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeletePacketMirroringRequest): + transport_class = transports.PacketMirroringsRestTransport + + request_init = {} + request_init["packet_mirroring"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["packetMirroring"] = 'packet_mirroring_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "packetMirroring" in jsonified_request + assert jsonified_request["packetMirroring"] == 'packet_mirroring_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("packetMirroring", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(), + ) + client = PacketMirroringsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeletePacketMirroringRequest.pb(compute.DeletePacketMirroringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePacketMirroringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePacketMirroringRequest): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeletePacketMirroringRequest(), + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + ) + + +def test_delete_unary_rest_error(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetPacketMirroringRequest, + dict, +]) +def test_get_rest(request_type): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.PacketMirroring( + creation_timestamp='creation_timestamp_value', + description='description_value', + enable='enable_value', + id=205, + kind='kind_value', + name='name_value', + priority=898, + region='region_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PacketMirroring.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.PacketMirroring) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.enable == 'enable_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.priority == 898 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetPacketMirroringRequest): + transport_class = transports.PacketMirroringsRestTransport + + request_init = {} + request_init["packet_mirroring"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["packetMirroring"] = 'packet_mirroring_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "packetMirroring" in jsonified_request + assert jsonified_request["packetMirroring"] == 'packet_mirroring_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = 
PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PacketMirroring() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PacketMirroring.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("packetMirroring", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(), + ) + 
client = PacketMirroringsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetPacketMirroringRequest.pb(compute.GetPacketMirroringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PacketMirroring.to_json(compute.PacketMirroring()) + + request = compute.GetPacketMirroringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PacketMirroring() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetPacketMirroringRequest): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.PacketMirroring() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PacketMirroring.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetPacketMirroringRequest(), + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + ) + + +def test_get_rest_error(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertPacketMirroringRequest, + dict, +]) +def test_insert_rest(request_type): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["packet_mirroring_resource"] = {'collector_ilb': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable': 'enable_value', 'filter': {'I_p_protocols': ['I_p_protocols_value1', 'I_p_protocols_value2'], 'cidr_ranges': ['cidr_ranges_value1', 'cidr_ranges_value2'], 'direction': 'direction_value'}, 'id': 205, 'kind': 'kind_value', 'mirrored_resources': {'instances': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'subnetworks': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'tags': ['tags_value1', 'tags_value2']}, 'name': 'name_value', 'network': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'priority': 898, 'region': 'region_value', 
'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertPacketMirroringRequest): + transport_class = transports.PacketMirroringsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("packetMirroringResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(), + ) + client = PacketMirroringsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertPacketMirroringRequest.pb(compute.InsertPacketMirroringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertPacketMirroringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertPacketMirroringRequest): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["packet_mirroring_resource"] = {'collector_ilb': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable': 'enable_value', 'filter': {'I_p_protocols': ['I_p_protocols_value1', 'I_p_protocols_value2'], 'cidr_ranges': ['cidr_ranges_value1', 'cidr_ranges_value2'], 'direction': 'direction_value'}, 'id': 205, 'kind': 'kind_value', 'mirrored_resources': {'instances': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'subnetworks': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'tags': ['tags_value1', 'tags_value2']}, 'name': 'name_value', 'network': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'priority': 898, 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + packet_mirroring_resource=compute.PacketMirroring(collector_ilb=compute.PacketMirroringForwardingRuleInfo(canonical_url='canonical_url_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertPacketMirroringRequest(), + project='project_value', + region='region_value', + packet_mirroring_resource=compute.PacketMirroring(collector_ilb=compute.PacketMirroringForwardingRuleInfo(canonical_url='canonical_url_value')), + ) + + +def test_insert_rest_error(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertPacketMirroringRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["packet_mirroring_resource"] = {'collector_ilb': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable': 'enable_value', 'filter': {'I_p_protocols': ['I_p_protocols_value1', 'I_p_protocols_value2'], 'cidr_ranges': ['cidr_ranges_value1', 'cidr_ranges_value2'], 'direction': 'direction_value'}, 'id': 205, 'kind': 'kind_value', 'mirrored_resources': {'instances': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'subnetworks': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'tags': ['tags_value1', 'tags_value2']}, 'name': 'name_value', 'network': 
{'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'priority': 898, 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertPacketMirroringRequest): + transport_class = transports.PacketMirroringsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("packetMirroringResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(), + ) + client = PacketMirroringsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertPacketMirroringRequest.pb(compute.InsertPacketMirroringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertPacketMirroringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertPacketMirroringRequest): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["packet_mirroring_resource"] = {'collector_ilb': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable': 'enable_value', 'filter': {'I_p_protocols': ['I_p_protocols_value1', 'I_p_protocols_value2'], 'cidr_ranges': ['cidr_ranges_value1', 'cidr_ranges_value2'], 'direction': 'direction_value'}, 'id': 205, 'kind': 'kind_value', 'mirrored_resources': {'instances': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'subnetworks': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'tags': ['tags_value1', 'tags_value2']}, 'name': 
'name_value', 'network': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'priority': 898, 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + packet_mirroring_resource=compute.PacketMirroring(collector_ilb=compute.PacketMirroringForwardingRuleInfo(canonical_url='canonical_url_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertPacketMirroringRequest(), + project='project_value', + region='region_value', + packet_mirroring_resource=compute.PacketMirroring(collector_ilb=compute.PacketMirroringForwardingRuleInfo(canonical_url='canonical_url_value')), + ) + + +def test_insert_unary_rest_error(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListPacketMirroringsRequest, + dict, +]) +def test_list_rest(request_type): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PacketMirroringList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PacketMirroringList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListPacketMirroringsRequest): + transport_class = transports.PacketMirroringsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PacketMirroringList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PacketMirroringList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(), + ) + client = PacketMirroringsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListPacketMirroringsRequest.pb(compute.ListPacketMirroringsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PacketMirroringList.to_json(compute.PacketMirroringList()) + + request = compute.ListPacketMirroringsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PacketMirroringList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListPacketMirroringsRequest): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PacketMirroringList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PacketMirroringList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListPacketMirroringsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.PacketMirroringList( + items=[ + compute.PacketMirroring(), + compute.PacketMirroring(), + compute.PacketMirroring(), + ], + next_page_token='abc', + ), + compute.PacketMirroringList( + items=[], + next_page_token='def', + ), + compute.PacketMirroringList( + items=[ + compute.PacketMirroring(), + ], + next_page_token='ghi', + ), + compute.PacketMirroringList( + items=[ + compute.PacketMirroring(), + compute.PacketMirroring(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.PacketMirroringList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.PacketMirroring) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPacketMirroringRequest, + dict, +]) +def test_patch_rest(request_type): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + request_init["packet_mirroring_resource"] = {'collector_ilb': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 
'description_value', 'enable': 'enable_value', 'filter': {'I_p_protocols': ['I_p_protocols_value1', 'I_p_protocols_value2'], 'cidr_ranges': ['cidr_ranges_value1', 'cidr_ranges_value2'], 'direction': 'direction_value'}, 'id': 205, 'kind': 'kind_value', 'mirrored_resources': {'instances': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'subnetworks': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'tags': ['tags_value1', 'tags_value2']}, 'name': 'name_value', 'network': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'priority': 898, 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchPacketMirroringRequest): + transport_class = transports.PacketMirroringsRestTransport + + request_init = {} + request_init["packet_mirroring"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify 
required fields with default values are now present + + jsonified_request["packetMirroring"] = 'packet_mirroring_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "packetMirroring" in jsonified_request + assert jsonified_request["packetMirroring"] == 'packet_mirroring_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("packetMirroring", "packetMirroringResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(), + ) + client = PacketMirroringsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPacketMirroringRequest.pb(compute.PatchPacketMirroringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPacketMirroringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPacketMirroringRequest): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + request_init["packet_mirroring_resource"] = {'collector_ilb': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable': 'enable_value', 'filter': {'I_p_protocols': ['I_p_protocols_value1', 'I_p_protocols_value2'], 'cidr_ranges': ['cidr_ranges_value1', 'cidr_ranges_value2'], 'direction': 'direction_value'}, 'id': 205, 'kind': 'kind_value', 'mirrored_resources': {'instances': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'subnetworks': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'tags': ['tags_value1', 'tags_value2']}, 'name': 'name_value', 'network': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'priority': 898, 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + packet_mirroring_resource=compute.PacketMirroring(collector_ilb=compute.PacketMirroringForwardingRuleInfo(canonical_url='canonical_url_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchPacketMirroringRequest(), + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + packet_mirroring_resource=compute.PacketMirroring(collector_ilb=compute.PacketMirroringForwardingRuleInfo(canonical_url='canonical_url_value')), + ) + + +def test_patch_rest_error(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPacketMirroringRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + request_init["packet_mirroring_resource"] = {'collector_ilb': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable': 'enable_value', 'filter': {'I_p_protocols': ['I_p_protocols_value1', 'I_p_protocols_value2'], 'cidr_ranges': ['cidr_ranges_value1', 'cidr_ranges_value2'], 'direction': 'direction_value'}, 'id': 205, 'kind': 'kind_value', 'mirrored_resources': {'instances': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'subnetworks': [{'canonical_url': 'canonical_url_value', 'url': 
'url_value'}], 'tags': ['tags_value1', 'tags_value2']}, 'name': 'name_value', 'network': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'priority': 898, 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchPacketMirroringRequest): + transport_class = transports.PacketMirroringsRestTransport + + request_init = {} + request_init["packet_mirroring"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["packetMirroring"] = 'packet_mirroring_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "packetMirroring" in jsonified_request + assert jsonified_request["packetMirroring"] == 'packet_mirroring_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("packetMirroring", "packetMirroringResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(), + ) + client = PacketMirroringsClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPacketMirroringRequest.pb(compute.PatchPacketMirroringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPacketMirroringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPacketMirroringRequest): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + request_init["packet_mirroring_resource"] = {'collector_ilb': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable': 'enable_value', 'filter': {'I_p_protocols': ['I_p_protocols_value1', 'I_p_protocols_value2'], 'cidr_ranges': ['cidr_ranges_value1', 'cidr_ranges_value2'], 'direction': 'direction_value'}, 'id': 205, 'kind': 'kind_value', 'mirrored_resources': {'instances': [{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'subnetworks': 
[{'canonical_url': 'canonical_url_value', 'url': 'url_value'}], 'tags': ['tags_value1', 'tags_value2']}, 'name': 'name_value', 'network': {'canonical_url': 'canonical_url_value', 'url': 'url_value'}, 'priority': 898, 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'packet_mirroring': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + packet_mirroring_resource=compute.PacketMirroring(collector_ilb=compute.PacketMirroringForwardingRuleInfo(canonical_url='canonical_url_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchPacketMirroringRequest(), + project='project_value', + region='region_value', + packet_mirroring='packet_mirroring_value', + packet_mirroring_resource=compute.PacketMirroring(collector_ilb=compute.PacketMirroringForwardingRuleInfo(canonical_url='canonical_url_value')), + ) + + +def test_patch_unary_rest_error(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsPacketMirroringRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsPacketMirroringRequest): + transport_class = transports.PacketMirroringsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.PacketMirroringsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PacketMirroringsRestInterceptor(), + ) + client = PacketMirroringsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.PacketMirroringsRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsPacketMirroringRequest.pb(compute.TestIamPermissionsPacketMirroringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsPacketMirroringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsPacketMirroringRequest): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsPacketMirroringRequest(), + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PacketMirroringsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PacketMirroringsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PacketMirroringsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PacketMirroringsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PacketMirroringsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PacketMirroringsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.PacketMirroringsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = PacketMirroringsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_packet_mirrorings_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PacketMirroringsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_packet_mirrorings_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.packet_mirrorings.transports.PacketMirroringsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.PacketMirroringsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_packet_mirrorings_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.packet_mirrorings.transports.PacketMirroringsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PacketMirroringsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_packet_mirrorings_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.packet_mirrorings.transports.PacketMirroringsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PacketMirroringsTransport() + adc.assert_called_once() + + +def test_packet_mirrorings_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PacketMirroringsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_packet_mirrorings_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.PacketMirroringsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_packet_mirrorings_host_no_port(transport_name): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_packet_mirrorings_host_with_port(transport_name): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_packet_mirrorings_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() 
+ creds2 = ga_credentials.AnonymousCredentials() + client1 = PacketMirroringsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PacketMirroringsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = PacketMirroringsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = PacketMirroringsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PacketMirroringsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = PacketMirroringsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = PacketMirroringsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PacketMirroringsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = PacketMirroringsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = PacketMirroringsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PacketMirroringsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = PacketMirroringsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = PacketMirroringsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PacketMirroringsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = PacketMirroringsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = PacketMirroringsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PacketMirroringsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.PacketMirroringsTransport, '_prep_wrapped_messages') as prep: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.PacketMirroringsTransport, '_prep_wrapped_messages') as prep: + transport_class = PacketMirroringsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # 
Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (PacketMirroringsClient, transports.PacketMirroringsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_projects.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_projects.py new file mode 100644 index 000000000..faf7fe5e9 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_projects.py @@ -0,0 +1,6816 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.projects import ProjectsClient +from google.cloud.compute_v1.services.projects import pagers +from google.cloud.compute_v1.services.projects import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ProjectsClient._get_default_mtls_endpoint(None) is None + assert ProjectsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ProjectsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ProjectsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ProjectsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ProjectsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ProjectsClient, "rest"), +]) +def test_projects_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ProjectsRestTransport, "rest"), +]) +def test_projects_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ProjectsClient, "rest"), +]) +def test_projects_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_projects_client_get_transport_class(): + transport = ProjectsClient.get_transport_class() + available_transports = [ + transports.ProjectsRestTransport, + ] + assert transport in available_transports + + transport = ProjectsClient.get_transport_class("rest") + assert transport == transports.ProjectsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ProjectsClient, transports.ProjectsRestTransport, "rest"), +]) +@mock.patch.object(ProjectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProjectsClient)) +def test_projects_client_client_options(client_class, transport_class, transport_name): + # 
Check that if channel is provided we won't create a new one. + with mock.patch.object(ProjectsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ProjectsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ProjectsClient, transports.ProjectsRestTransport, "rest", "true"), + (ProjectsClient, transports.ProjectsRestTransport, "rest", "false"), +]) +@mock.patch.object(ProjectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProjectsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_projects_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ProjectsClient +]) +@mock.patch.object(ProjectsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProjectsClient)) +def test_projects_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ProjectsClient, transports.ProjectsRestTransport, "rest"), +]) +def test_projects_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ProjectsClient, transports.ProjectsRestTransport, "rest", None), +]) +def test_projects_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DisableXpnHostProjectRequest, + dict, +]) +def test_disable_xpn_host_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.disable_xpn_host(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_disable_xpn_host_rest_required_fields(request_type=compute.DisableXpnHostProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_xpn_host._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_xpn_host._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.disable_xpn_host(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_disable_xpn_host_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.disable_xpn_host._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_xpn_host_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_disable_xpn_host") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_disable_xpn_host") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DisableXpnHostProjectRequest.pb(compute.DisableXpnHostProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DisableXpnHostProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.disable_xpn_host(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_xpn_host_rest_bad_request(transport: str = 'rest', request_type=compute.DisableXpnHostProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_xpn_host(request) + + +def test_disable_xpn_host_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.disable_xpn_host(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/disableXpnHost" % client.transport._host, args[1]) + + +def test_disable_xpn_host_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.disable_xpn_host( + compute.DisableXpnHostProjectRequest(), + project='project_value', + ) + + +def test_disable_xpn_host_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DisableXpnHostProjectRequest, + dict, +]) +def test_disable_xpn_host_unary_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.disable_xpn_host_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_disable_xpn_host_unary_rest_required_fields(request_type=compute.DisableXpnHostProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_xpn_host._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_xpn_host._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.disable_xpn_host_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_disable_xpn_host_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.disable_xpn_host._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_xpn_host_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_disable_xpn_host") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_disable_xpn_host") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DisableXpnHostProjectRequest.pb(compute.DisableXpnHostProjectRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DisableXpnHostProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.disable_xpn_host_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_xpn_host_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DisableXpnHostProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_xpn_host_unary(request) + + +def test_disable_xpn_host_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.disable_xpn_host_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/disableXpnHost" % client.transport._host, args[1]) + + +def test_disable_xpn_host_unary_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.disable_xpn_host_unary( + compute.DisableXpnHostProjectRequest(), + project='project_value', + ) + + +def test_disable_xpn_host_unary_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DisableXpnResourceProjectRequest, + dict, +]) +def test_disable_xpn_resource_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_disable_xpn_resource_request_resource"] = {'xpn_resource': {'id': 'id_value', 'type_': 'type__value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.disable_xpn_resource(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_disable_xpn_resource_rest_required_fields(request_type=compute.DisableXpnResourceProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_xpn_resource._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_xpn_resource._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.disable_xpn_resource(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_disable_xpn_resource_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.disable_xpn_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "projectsDisableXpnResourceRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_xpn_resource_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_disable_xpn_resource") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_disable_xpn_resource") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DisableXpnResourceProjectRequest.pb(compute.DisableXpnResourceProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DisableXpnResourceProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.disable_xpn_resource(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_xpn_resource_rest_bad_request(transport: str = 'rest', request_type=compute.DisableXpnResourceProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_disable_xpn_resource_request_resource"] = {'xpn_resource': {'id': 'id_value', 'type_': 'type__value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_xpn_resource(request) + + +def test_disable_xpn_resource_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + projects_disable_xpn_resource_request_resource=compute.ProjectsDisableXpnResourceRequest(xpn_resource=compute.XpnResourceId(id='id_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.disable_xpn_resource(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/disableXpnResource" % client.transport._host, args[1]) + + +def test_disable_xpn_resource_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.disable_xpn_resource( + compute.DisableXpnResourceProjectRequest(), + project='project_value', + projects_disable_xpn_resource_request_resource=compute.ProjectsDisableXpnResourceRequest(xpn_resource=compute.XpnResourceId(id='id_value')), + ) + + +def test_disable_xpn_resource_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DisableXpnResourceProjectRequest, + dict, +]) +def test_disable_xpn_resource_unary_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_disable_xpn_resource_request_resource"] = {'xpn_resource': {'id': 'id_value', 'type_': 'type__value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.disable_xpn_resource_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_disable_xpn_resource_unary_rest_required_fields(request_type=compute.DisableXpnResourceProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_xpn_resource._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_xpn_resource._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.disable_xpn_resource_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_disable_xpn_resource_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.disable_xpn_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "projectsDisableXpnResourceRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_xpn_resource_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_disable_xpn_resource") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_disable_xpn_resource") as pre: + pre.assert_not_called() + post.assert_not_called() 
+ pb_message = compute.DisableXpnResourceProjectRequest.pb(compute.DisableXpnResourceProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DisableXpnResourceProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.disable_xpn_resource_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_xpn_resource_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DisableXpnResourceProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_disable_xpn_resource_request_resource"] = {'xpn_resource': {'id': 'id_value', 'type_': 'type__value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_xpn_resource_unary(request) + + +def test_disable_xpn_resource_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + projects_disable_xpn_resource_request_resource=compute.ProjectsDisableXpnResourceRequest(xpn_resource=compute.XpnResourceId(id='id_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.disable_xpn_resource_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/disableXpnResource" % client.transport._host, args[1]) + + +def test_disable_xpn_resource_unary_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.disable_xpn_resource_unary( + compute.DisableXpnResourceProjectRequest(), + project='project_value', + projects_disable_xpn_resource_request_resource=compute.ProjectsDisableXpnResourceRequest(xpn_resource=compute.XpnResourceId(id='id_value')), + ) + + +def test_disable_xpn_resource_unary_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.EnableXpnHostProjectRequest, + dict, +]) +def test_enable_xpn_host_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.enable_xpn_host(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_enable_xpn_host_rest_required_fields(request_type=compute.EnableXpnHostProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_xpn_host._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_xpn_host._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.enable_xpn_host(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_enable_xpn_host_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.enable_xpn_host._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_xpn_host_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_enable_xpn_host") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_enable_xpn_host") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.EnableXpnHostProjectRequest.pb(compute.EnableXpnHostProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.EnableXpnHostProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.enable_xpn_host(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_xpn_host_rest_bad_request(transport: str = 'rest', request_type=compute.EnableXpnHostProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_xpn_host(request) + + +def test_enable_xpn_host_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.enable_xpn_host(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/enableXpnHost" % client.transport._host, args[1]) + + +def test_enable_xpn_host_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.enable_xpn_host( + compute.EnableXpnHostProjectRequest(), + project='project_value', + ) + + +def test_enable_xpn_host_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.EnableXpnHostProjectRequest, + dict, +]) +def test_enable_xpn_host_unary_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.enable_xpn_host_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_enable_xpn_host_unary_rest_required_fields(request_type=compute.EnableXpnHostProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_xpn_host._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_xpn_host._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.enable_xpn_host_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_enable_xpn_host_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.enable_xpn_host._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_xpn_host_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_enable_xpn_host") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_enable_xpn_host") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.EnableXpnHostProjectRequest.pb(compute.EnableXpnHostProjectRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.EnableXpnHostProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.enable_xpn_host_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_xpn_host_unary_rest_bad_request(transport: str = 'rest', request_type=compute.EnableXpnHostProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_xpn_host_unary(request) + + +def test_enable_xpn_host_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.enable_xpn_host_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/enableXpnHost" % client.transport._host, args[1]) + + +def test_enable_xpn_host_unary_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.enable_xpn_host_unary( + compute.EnableXpnHostProjectRequest(), + project='project_value', + ) + + +def test_enable_xpn_host_unary_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.EnableXpnResourceProjectRequest, + dict, +]) +def test_enable_xpn_resource_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_enable_xpn_resource_request_resource"] = {'xpn_resource': {'id': 'id_value', 'type_': 'type__value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.enable_xpn_resource(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_enable_xpn_resource_rest_required_fields(request_type=compute.EnableXpnResourceProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_xpn_resource._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_xpn_resource._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.enable_xpn_resource(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_enable_xpn_resource_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.enable_xpn_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "projectsEnableXpnResourceRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_xpn_resource_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_enable_xpn_resource") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_enable_xpn_resource") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.EnableXpnResourceProjectRequest.pb(compute.EnableXpnResourceProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.EnableXpnResourceProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.enable_xpn_resource(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_xpn_resource_rest_bad_request(transport: str = 'rest', request_type=compute.EnableXpnResourceProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_enable_xpn_resource_request_resource"] = {'xpn_resource': {'id': 'id_value', 'type_': 'type__value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_xpn_resource(request) + + +def test_enable_xpn_resource_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + projects_enable_xpn_resource_request_resource=compute.ProjectsEnableXpnResourceRequest(xpn_resource=compute.XpnResourceId(id='id_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.enable_xpn_resource(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/enableXpnResource" % client.transport._host, args[1]) + + +def test_enable_xpn_resource_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.enable_xpn_resource( + compute.EnableXpnResourceProjectRequest(), + project='project_value', + projects_enable_xpn_resource_request_resource=compute.ProjectsEnableXpnResourceRequest(xpn_resource=compute.XpnResourceId(id='id_value')), + ) + + +def test_enable_xpn_resource_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.EnableXpnResourceProjectRequest, + dict, +]) +def test_enable_xpn_resource_unary_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_enable_xpn_resource_request_resource"] = {'xpn_resource': {'id': 'id_value', 'type_': 'type__value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.enable_xpn_resource_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_enable_xpn_resource_unary_rest_required_fields(request_type=compute.EnableXpnResourceProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_xpn_resource._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_xpn_resource._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.enable_xpn_resource_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_enable_xpn_resource_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.enable_xpn_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "projectsEnableXpnResourceRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_xpn_resource_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_enable_xpn_resource") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_enable_xpn_resource") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = compute.EnableXpnResourceProjectRequest.pb(compute.EnableXpnResourceProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.EnableXpnResourceProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.enable_xpn_resource_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_xpn_resource_unary_rest_bad_request(transport: str = 'rest', request_type=compute.EnableXpnResourceProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_enable_xpn_resource_request_resource"] = {'xpn_resource': {'id': 'id_value', 'type_': 'type__value'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_xpn_resource_unary(request) + + +def test_enable_xpn_resource_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + projects_enable_xpn_resource_request_resource=compute.ProjectsEnableXpnResourceRequest(xpn_resource=compute.XpnResourceId(id='id_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.enable_xpn_resource_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/enableXpnResource" % client.transport._host, args[1]) + + +def test_enable_xpn_resource_unary_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.enable_xpn_resource_unary( + compute.EnableXpnResourceProjectRequest(), + project='project_value', + projects_enable_xpn_resource_request_resource=compute.ProjectsEnableXpnResourceRequest(xpn_resource=compute.XpnResourceId(id='id_value')), + ) + + +def test_enable_xpn_resource_unary_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetProjectRequest, + dict, +]) +def test_get_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Project( + creation_timestamp='creation_timestamp_value', + default_network_tier='default_network_tier_value', + default_service_account='default_service_account_value', + description='description_value', + enabled_features=['enabled_features_value'], + id=205, + kind='kind_value', + name='name_value', + self_link='self_link_value', + vm_dns_setting='vm_dns_setting_value', + xpn_project_status='xpn_project_status_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Project) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.default_network_tier == 'default_network_tier_value' + assert response.default_service_account == 'default_service_account_value' + assert response.description == 'description_value' + assert response.enabled_features == ['enabled_features_value'] + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + assert response.vm_dns_setting == 'vm_dns_setting_value' + assert response.xpn_project_status == 'xpn_project_status_value' + + +def test_get_rest_required_fields(request_type=compute.GetProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Project() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_get") as post, \ + 
mock.patch.object(transports.ProjectsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetProjectRequest.pb(compute.GetProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Project.to_json(compute.Project()) + + request = compute.GetProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Project() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Project() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetProjectRequest(), + project='project_value', + ) + + +def test_get_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetXpnHostProjectRequest, + dict, +]) +def test_get_xpn_host_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Project( + creation_timestamp='creation_timestamp_value', + default_network_tier='default_network_tier_value', + default_service_account='default_service_account_value', + description='description_value', + enabled_features=['enabled_features_value'], + id=205, + kind='kind_value', + name='name_value', + self_link='self_link_value', + vm_dns_setting='vm_dns_setting_value', + xpn_project_status='xpn_project_status_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_xpn_host(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Project) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.default_network_tier == 'default_network_tier_value' + assert response.default_service_account == 'default_service_account_value' + assert response.description == 'description_value' + assert response.enabled_features == ['enabled_features_value'] + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + assert response.vm_dns_setting == 'vm_dns_setting_value' + assert response.xpn_project_status == 'xpn_project_status_value' + + +def test_get_xpn_host_rest_required_fields(request_type=compute.GetXpnHostProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify 
fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_xpn_host._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_xpn_host._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Project() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_xpn_host(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_xpn_host_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_xpn_host._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_xpn_host_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_get_xpn_host") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_get_xpn_host") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetXpnHostProjectRequest.pb(compute.GetXpnHostProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Project.to_json(compute.Project()) + + request = compute.GetXpnHostProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Project() + + client.get_xpn_host(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_xpn_host_rest_bad_request(transport: str = 'rest', request_type=compute.GetXpnHostProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_xpn_host(request) + + +def test_get_xpn_host_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Project() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Project.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_xpn_host(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/getXpnHost" % client.transport._host, args[1]) + + +def test_get_xpn_host_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_xpn_host( + compute.GetXpnHostProjectRequest(), + project='project_value', + ) + + +def test_get_xpn_host_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetXpnResourcesProjectsRequest, + dict, +]) +def test_get_xpn_resources_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ProjectsGetXpnResources( + kind='kind_value', + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ProjectsGetXpnResources.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_xpn_resources(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.GetXpnResourcesPager) + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + + +def test_get_xpn_resources_rest_required_fields(request_type=compute.GetXpnResourcesProjectsRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_xpn_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_xpn_resources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ProjectsGetXpnResources() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ProjectsGetXpnResources.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_xpn_resources(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_xpn_resources_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_xpn_resources._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_xpn_resources_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_get_xpn_resources") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_get_xpn_resources") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetXpnResourcesProjectsRequest.pb(compute.GetXpnResourcesProjectsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ProjectsGetXpnResources.to_json(compute.ProjectsGetXpnResources()) + + request = compute.GetXpnResourcesProjectsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ProjectsGetXpnResources() + + client.get_xpn_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_xpn_resources_rest_bad_request(transport: str = 'rest', request_type=compute.GetXpnResourcesProjectsRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_xpn_resources(request) + + +def test_get_xpn_resources_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ProjectsGetXpnResources() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ProjectsGetXpnResources.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_xpn_resources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/getXpnResources" % client.transport._host, args[1]) + + +def test_get_xpn_resources_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_xpn_resources( + compute.GetXpnResourcesProjectsRequest(), + project='project_value', + ) + + +def test_get_xpn_resources_rest_pager(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ProjectsGetXpnResources( + resources=[ + compute.XpnResourceId(), + compute.XpnResourceId(), + compute.XpnResourceId(), + ], + next_page_token='abc', + ), + compute.ProjectsGetXpnResources( + resources=[], + next_page_token='def', + ), + compute.ProjectsGetXpnResources( + resources=[ + compute.XpnResourceId(), + ], + next_page_token='ghi', + ), + compute.ProjectsGetXpnResources( + resources=[ + compute.XpnResourceId(), + compute.XpnResourceId(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ProjectsGetXpnResources.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.get_xpn_resources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.XpnResourceId) + for i in results) + + pages = list(client.get_xpn_resources(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListXpnHostsProjectsRequest, + dict, +]) +def test_list_xpn_hosts_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_list_xpn_hosts_request_resource"] = {'organization': 'organization_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.XpnHostList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.XpnHostList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_xpn_hosts(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListXpnHostsPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_xpn_hosts_rest_required_fields(request_type=compute.ListXpnHostsProjectsRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_xpn_hosts._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_xpn_hosts._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.XpnHostList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.XpnHostList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_xpn_hosts(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_xpn_hosts_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_xpn_hosts._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "projectsListXpnHostsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_xpn_hosts_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_list_xpn_hosts") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_list_xpn_hosts") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListXpnHostsProjectsRequest.pb(compute.ListXpnHostsProjectsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.XpnHostList.to_json(compute.XpnHostList()) + + request = compute.ListXpnHostsProjectsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.XpnHostList() + + client.list_xpn_hosts(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_xpn_hosts_rest_bad_request(transport: str = 'rest', request_type=compute.ListXpnHostsProjectsRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_list_xpn_hosts_request_resource"] = {'organization': 'organization_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_xpn_hosts(request) + + +def test_list_xpn_hosts_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.XpnHostList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + projects_list_xpn_hosts_request_resource=compute.ProjectsListXpnHostsRequest(organization='organization_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.XpnHostList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_xpn_hosts(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/listXpnHosts" % client.transport._host, args[1]) + + +def test_list_xpn_hosts_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_xpn_hosts( + compute.ListXpnHostsProjectsRequest(), + project='project_value', + projects_list_xpn_hosts_request_resource=compute.ProjectsListXpnHostsRequest(organization='organization_value'), + ) + + +def test_list_xpn_hosts_rest_pager(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.XpnHostList( + items=[ + compute.Project(), + compute.Project(), + compute.Project(), + ], + next_page_token='abc', + ), + compute.XpnHostList( + items=[], + next_page_token='def', + ), + compute.XpnHostList( + items=[ + compute.Project(), + ], + next_page_token='ghi', + ), + compute.XpnHostList( + items=[ + compute.Project(), + compute.Project(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.XpnHostList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + sample_request["projects_list_xpn_hosts_request_resource"] = compute.ProjectsListXpnHostsRequest(organization='organization_value') + + pager = client.list_xpn_hosts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Project) + for i in results) + + pages = list(client.list_xpn_hosts(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.MoveDiskProjectRequest, + dict, +]) +def test_move_disk_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["disk_move_request_resource"] = {'destination_zone': 'destination_zone_value', 'target_disk': 'target_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method 
and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.move_disk(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_move_disk_rest_required_fields(request_type=compute.MoveDiskProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_disk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).move_disk._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move_disk(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_disk_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.move_disk._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("diskMoveRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_disk_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_move_disk") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_move_disk") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveDiskProjectRequest.pb(compute.MoveDiskProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveDiskProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move_disk(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_disk_rest_bad_request(transport: str = 'rest', request_type=compute.MoveDiskProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["disk_move_request_resource"] = {'destination_zone': 'destination_zone_value', 'target_disk': 'target_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_disk(request) + + +def test_move_disk_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + disk_move_request_resource=compute.DiskMoveRequest(destination_zone='destination_zone_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move_disk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/moveDisk" % client.transport._host, args[1]) + + +def test_move_disk_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move_disk( + compute.MoveDiskProjectRequest(), + project='project_value', + disk_move_request_resource=compute.DiskMoveRequest(destination_zone='destination_zone_value'), + ) + + +def test_move_disk_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.MoveDiskProjectRequest, + dict, +]) +def test_move_disk_unary_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["disk_move_request_resource"] = {'destination_zone': 'destination_zone_value', 'target_disk': 'target_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.move_disk_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_move_disk_unary_rest_required_fields(request_type=compute.MoveDiskProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_disk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_disk._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move_disk_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_disk_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.move_disk._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("diskMoveRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_disk_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_move_disk") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_move_disk") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveDiskProjectRequest.pb(compute.MoveDiskProjectRequest()) 
+ transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveDiskProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move_disk_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_disk_unary_rest_bad_request(transport: str = 'rest', request_type=compute.MoveDiskProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["disk_move_request_resource"] = {'destination_zone': 'destination_zone_value', 'target_disk': 'target_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_disk_unary(request) + + +def test_move_disk_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + disk_move_request_resource=compute.DiskMoveRequest(destination_zone='destination_zone_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move_disk_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/moveDisk" % client.transport._host, args[1]) + + +def test_move_disk_unary_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move_disk_unary( + compute.MoveDiskProjectRequest(), + project='project_value', + disk_move_request_resource=compute.DiskMoveRequest(destination_zone='destination_zone_value'), + ) + + +def test_move_disk_unary_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.MoveInstanceProjectRequest, + dict, +]) +def test_move_instance_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["instance_move_request_resource"] = {'destination_zone': 'destination_zone_value', 'target_instance': 'target_instance_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.move_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_move_instance_rest_required_fields(request_type=compute.MoveInstanceProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_instance_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.move_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceMoveRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_instance_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_move_instance") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_move_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.MoveInstanceProjectRequest.pb(compute.MoveInstanceProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveInstanceProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_instance_rest_bad_request(transport: str = 'rest', request_type=compute.MoveInstanceProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["instance_move_request_resource"] = {'destination_zone': 'destination_zone_value', 'target_instance': 'target_instance_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_instance(request) + + +def test_move_instance_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + instance_move_request_resource=compute.InstanceMoveRequest(destination_zone='destination_zone_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/moveInstance" % client.transport._host, args[1]) + + +def test_move_instance_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move_instance( + compute.MoveInstanceProjectRequest(), + project='project_value', + instance_move_request_resource=compute.InstanceMoveRequest(destination_zone='destination_zone_value'), + ) + + +def test_move_instance_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.MoveInstanceProjectRequest, + dict, +]) +def test_move_instance_unary_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["instance_move_request_resource"] = {'destination_zone': 'destination_zone_value', 'target_instance': 'target_instance_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.move_instance_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_move_instance_unary_rest_required_fields(request_type=compute.MoveInstanceProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.move_instance_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_move_instance_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.move_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceMoveRequestResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_instance_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_move_instance") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_move_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.MoveInstanceProjectRequest.pb(compute.MoveInstanceProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.MoveInstanceProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.move_instance_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_instance_unary_rest_bad_request(transport: str = 'rest', request_type=compute.MoveInstanceProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["instance_move_request_resource"] = {'destination_zone': 'destination_zone_value', 'target_instance': 'target_instance_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_instance_unary(request) + + +def test_move_instance_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + instance_move_request_resource=compute.InstanceMoveRequest(destination_zone='destination_zone_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.move_instance_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/moveInstance" % client.transport._host, args[1]) + + +def test_move_instance_unary_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move_instance_unary( + compute.MoveInstanceProjectRequest(), + project='project_value', + instance_move_request_resource=compute.InstanceMoveRequest(destination_zone='destination_zone_value'), + ) + + +def test_move_instance_unary_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetCommonInstanceMetadataProjectRequest, + dict, +]) +def test_set_common_instance_metadata_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["metadata_resource"] = {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_common_instance_metadata(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_common_instance_metadata_rest_required_fields(request_type=compute.SetCommonInstanceMetadataProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_common_instance_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' 
+ + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_common_instance_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_common_instance_metadata(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_common_instance_metadata_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.set_common_instance_metadata._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("metadataResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_common_instance_metadata_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_set_common_instance_metadata") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_set_common_instance_metadata") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetCommonInstanceMetadataProjectRequest.pb(compute.SetCommonInstanceMetadataProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetCommonInstanceMetadataProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_common_instance_metadata(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_common_instance_metadata_rest_bad_request(transport: str = 'rest', request_type=compute.SetCommonInstanceMetadataProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["metadata_resource"] = {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_common_instance_metadata(request) + + +def test_set_common_instance_metadata_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + metadata_resource=compute.Metadata(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_common_instance_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/setCommonInstanceMetadata" % client.transport._host, args[1]) + + +def test_set_common_instance_metadata_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    # (continuation of test_set_common_instance_metadata_rest_flattened_error,
    # whose `def` line precedes this chunk) -- mixing a request object with
    # flattened keyword arguments must raise.
    with pytest.raises(ValueError):
        client.set_common_instance_metadata(
            compute.SetCommonInstanceMetadataProjectRequest(),
            project='project_value',
            metadata_resource=compute.Metadata(fingerprint='fingerprint_value'),
        )


def test_set_common_instance_metadata_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.SetCommonInstanceMetadataProjectRequest,
    dict,
])
def test_set_common_instance_metadata_unary_rest(request_type):
    """set_common_instance_metadata_unary round-trips a mocked HTTP 200."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["metadata_resource"] = {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.set_common_instance_metadata_unary(request)

    # Establish that the response is the type that we expect.
    # (The *_unary variant returns the raw Operation, not an ExtendedOperation.)
    assert isinstance(response, compute.Operation)


def test_set_common_instance_metadata_unary_rest_required_fields(request_type=compute.SetCommonInstanceMetadataProjectRequest):
    """Verify required-field handling for set_common_instance_metadata (unary)."""
    transport_class = transports.ProjectsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_common_instance_metadata._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_common_instance_metadata._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_common_instance_metadata_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_common_instance_metadata_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_common_instance_metadata._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("metadataResource", "project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_common_instance_metadata_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_set_common_instance_metadata") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_set_common_instance_metadata") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.SetCommonInstanceMetadataProjectRequest.pb(compute.SetCommonInstanceMetadataProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetCommonInstanceMetadataProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_common_instance_metadata_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_common_instance_metadata_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetCommonInstanceMetadataProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["metadata_resource"] = {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_common_instance_metadata_unary(request) + + +def test_set_common_instance_metadata_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + metadata_resource=compute.Metadata(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_common_instance_metadata_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/setCommonInstanceMetadata" % client.transport._host, args[1])


def test_set_common_instance_metadata_unary_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.set_common_instance_metadata_unary(
            compute.SetCommonInstanceMetadataProjectRequest(),
            project='project_value',
            metadata_resource=compute.Metadata(fingerprint='fingerprint_value'),
        )


def test_set_common_instance_metadata_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.SetDefaultNetworkTierProjectRequest,
    dict,
])
def test_set_default_network_tier_rest(request_type):
    """set_default_network_tier round-trips a mocked HTTP 200."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["projects_set_default_network_tier_request_resource"] = {'network_tier': 'network_tier_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.set_default_network_tier(request)

    # Establish that the response is the type that we expect.
    # (The non-unary variant wraps the Operation in an ExtendedOperation.)
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_set_default_network_tier_rest_required_fields(request_type=compute.SetDefaultNetworkTierProjectRequest):
    """Verify required-field handling for set_default_network_tier."""
    transport_class = transports.ProjectsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_default_network_tier._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_default_network_tier._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_default_network_tier(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_default_network_tier_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_default_network_tier._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "projectsSetDefaultNetworkTierRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_default_network_tier_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_set_default_network_tier") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_set_default_network_tier") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetDefaultNetworkTierProjectRequest.pb(compute.SetDefaultNetworkTierProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetDefaultNetworkTierProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_default_network_tier(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_default_network_tier_rest_bad_request(transport: str = 'rest', request_type=compute.SetDefaultNetworkTierProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_set_default_network_tier_request_resource"] = {'network_tier': 'network_tier_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_default_network_tier(request) + + +def test_set_default_network_tier_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            projects_set_default_network_tier_request_resource=compute.ProjectsSetDefaultNetworkTierRequest(network_tier='network_tier_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.set_default_network_tier(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/setDefaultNetworkTier" % client.transport._host, args[1])


def test_set_default_network_tier_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.set_default_network_tier(
            compute.SetDefaultNetworkTierProjectRequest(),
            project='project_value',
            projects_set_default_network_tier_request_resource=compute.ProjectsSetDefaultNetworkTierRequest(network_tier='network_tier_value'),
        )


def test_set_default_network_tier_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.SetDefaultNetworkTierProjectRequest,
    dict,
])
def test_set_default_network_tier_unary_rest(request_type):
    """set_default_network_tier_unary round-trips a mocked HTTP 200."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["projects_set_default_network_tier_request_resource"] = {'network_tier': 'network_tier_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.set_default_network_tier_unary(request)

    # Establish that the response is the type that we expect.
+ assert isinstance(response, compute.Operation) + + +def test_set_default_network_tier_unary_rest_required_fields(request_type=compute.SetDefaultNetworkTierProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_default_network_tier._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_default_network_tier._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_default_network_tier_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_default_network_tier_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_default_network_tier._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "projectsSetDefaultNetworkTierRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_default_network_tier_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_set_default_network_tier") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_set_default_network_tier") as pre: + pre.assert_not_called() 
+ post.assert_not_called() + pb_message = compute.SetDefaultNetworkTierProjectRequest.pb(compute.SetDefaultNetworkTierProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetDefaultNetworkTierProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_default_network_tier_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_default_network_tier_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetDefaultNetworkTierProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["projects_set_default_network_tier_request_resource"] = {'network_tier': 'network_tier_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_default_network_tier_unary(request) + + +def test_set_default_network_tier_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            projects_set_default_network_tier_request_resource=compute.ProjectsSetDefaultNetworkTierRequest(network_tier='network_tier_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.set_default_network_tier_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/setDefaultNetworkTier" % client.transport._host, args[1])


def test_set_default_network_tier_unary_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.set_default_network_tier_unary(
            compute.SetDefaultNetworkTierProjectRequest(),
            project='project_value',
            projects_set_default_network_tier_request_resource=compute.ProjectsSetDefaultNetworkTierRequest(network_tier='network_tier_value'),
        )


def test_set_default_network_tier_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.SetUsageExportBucketProjectRequest,
    dict,
])
def test_set_usage_export_bucket_rest(request_type):
    """set_usage_export_bucket round-trips a mocked HTTP 200."""
    client = ProjectsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request_init["usage_export_location_resource"] = {'bucket_name': 'bucket_name_value', 'report_name_prefix': 'report_name_prefix_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.set_usage_export_bucket(request)

    # Establish that the response is the type that we expect.
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_usage_export_bucket_rest_required_fields(request_type=compute.SetUsageExportBucketProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_usage_export_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_usage_export_bucket._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_usage_export_bucket(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_usage_export_bucket_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_usage_export_bucket._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "usageExportLocationResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_usage_export_bucket_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_set_usage_export_bucket") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_set_usage_export_bucket") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUsageExportBucketProjectRequest.pb(compute.SetUsageExportBucketProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUsageExportBucketProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_usage_export_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_usage_export_bucket_rest_bad_request(transport: str = 'rest', request_type=compute.SetUsageExportBucketProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["usage_export_location_resource"] = {'bucket_name': 'bucket_name_value', 'report_name_prefix': 'report_name_prefix_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_usage_export_bucket(request) + + +def test_set_usage_export_bucket_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + usage_export_location_resource=compute.UsageExportLocation(bucket_name='bucket_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_usage_export_bucket(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/setUsageExportBucket" % client.transport._host, args[1]) + + +def test_set_usage_export_bucket_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_usage_export_bucket( + compute.SetUsageExportBucketProjectRequest(), + project='project_value', + usage_export_location_resource=compute.UsageExportLocation(bucket_name='bucket_name_value'), + ) + + +def test_set_usage_export_bucket_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetUsageExportBucketProjectRequest, + dict, +]) +def test_set_usage_export_bucket_unary_rest(request_type): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["usage_export_location_resource"] = {'bucket_name': 'bucket_name_value', 'report_name_prefix': 'report_name_prefix_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_usage_export_bucket_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_usage_export_bucket_unary_rest_required_fields(request_type=compute.SetUsageExportBucketProjectRequest): + transport_class = transports.ProjectsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_usage_export_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_usage_export_bucket._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_usage_export_bucket_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_usage_export_bucket_unary_rest_unset_required_fields(): + transport = transports.ProjectsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_usage_export_bucket._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "usageExportLocationResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_usage_export_bucket_unary_rest_interceptors(null_interceptor): + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ProjectsRestInterceptor(), + ) + client = ProjectsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ProjectsRestInterceptor, "post_set_usage_export_bucket") as post, \ + mock.patch.object(transports.ProjectsRestInterceptor, "pre_set_usage_export_bucket") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.SetUsageExportBucketProjectRequest.pb(compute.SetUsageExportBucketProjectRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUsageExportBucketProjectRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_usage_export_bucket_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_usage_export_bucket_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetUsageExportBucketProjectRequest): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["usage_export_location_resource"] = {'bucket_name': 'bucket_name_value', 'report_name_prefix': 'report_name_prefix_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_usage_export_bucket_unary(request) + + +def test_set_usage_export_bucket_unary_rest_flattened(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + usage_export_location_resource=compute.UsageExportLocation(bucket_name='bucket_name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_usage_export_bucket_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/setUsageExportBucket" % client.transport._host, args[1]) + + +def test_set_usage_export_bucket_unary_rest_flattened_error(transport: str = 'rest'): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_usage_export_bucket_unary( + compute.SetUsageExportBucketProjectRequest(), + project='project_value', + usage_export_location_resource=compute.UsageExportLocation(bucket_name='bucket_name_value'), + ) + + +def test_set_usage_export_bucket_unary_rest_error(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProjectsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProjectsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ProjectsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProjectsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ProjectsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ProjectsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = ProjectsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_projects_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProjectsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_projects_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.projects.transports.ProjectsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ProjectsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'disable_xpn_host', + 'disable_xpn_resource', + 'enable_xpn_host', + 'enable_xpn_resource', + 'get', + 'get_xpn_host', + 'get_xpn_resources', + 'list_xpn_hosts', + 'move_disk', + 'move_instance', + 'set_common_instance_metadata', + 'set_default_network_tier', + 'set_usage_export_bucket', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_projects_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.projects.transports.ProjectsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProjectsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_projects_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.projects.transports.ProjectsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProjectsTransport() + adc.assert_called_once() + + +def test_projects_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProjectsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_projects_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ProjectsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_projects_host_no_port(transport_name): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_projects_host_with_port(transport_name): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + 
client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_projects_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProjectsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProjectsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.disable_xpn_host._session + session2 = client2.transport.disable_xpn_host._session + assert session1 != session2 + session1 = client1.transport.disable_xpn_resource._session + session2 = client2.transport.disable_xpn_resource._session + assert session1 != session2 + session1 = client1.transport.enable_xpn_host._session + session2 = client2.transport.enable_xpn_host._session + assert session1 != session2 + session1 = client1.transport.enable_xpn_resource._session + session2 = client2.transport.enable_xpn_resource._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_xpn_host._session + session2 = client2.transport.get_xpn_host._session + assert session1 != session2 + session1 = client1.transport.get_xpn_resources._session + session2 = client2.transport.get_xpn_resources._session + assert session1 != session2 + session1 = client1.transport.list_xpn_hosts._session + session2 = client2.transport.list_xpn_hosts._session + assert session1 != session2 + session1 = client1.transport.move_disk._session + session2 = client2.transport.move_disk._session + assert session1 != session2 + session1 = client1.transport.move_instance._session + 
session2 = client2.transport.move_instance._session + assert session1 != session2 + session1 = client1.transport.set_common_instance_metadata._session + session2 = client2.transport.set_common_instance_metadata._session + assert session1 != session2 + session1 = client1.transport.set_default_network_tier._session + session2 = client2.transport.set_default_network_tier._session + assert session1 != session2 + session1 = client1.transport.set_usage_export_bucket._session + session2 = client2.transport.set_usage_export_bucket._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ProjectsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ProjectsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ProjectsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ProjectsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProjectsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ProjectsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ProjectsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ProjectsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ProjectsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ProjectsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ProjectsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ProjectsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ProjectsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ProjectsTransport, '_prep_wrapped_messages') as prep: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ProjectsTransport, '_prep_wrapped_messages') as prep: + transport_class = ProjectsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ProjectsClient, transports.ProjectsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py new file mode 100644 index 000000000..8d1442e21 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py @@ -0,0 +1,3035 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.public_advertised_prefixes import PublicAdvertisedPrefixesClient +from google.cloud.compute_v1.services.public_advertised_prefixes import pagers +from google.cloud.compute_v1.services.public_advertised_prefixes import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint(client):
    # If the client's default endpoint is localhost, the mTLS endpoint would be
    # identical; substitute a distinct hostname so endpoint-switching tests can
    # tell the two apart.
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    # _get_default_mtls_endpoint inserts ".mtls" into *.googleapis.com hosts,
    # is idempotent on already-mTLS hosts, and leaves non-Google hosts alone.
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert PublicAdvertisedPrefixesClient._get_default_mtls_endpoint(None) is None
    assert PublicAdvertisedPrefixesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert PublicAdvertisedPrefixesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert PublicAdvertisedPrefixesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert PublicAdvertisedPrefixesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert PublicAdvertisedPrefixesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (PublicAdvertisedPrefixesClient, "rest"),
])
def test_public_advertised_prefixes_client_from_service_account_info(client_class, transport_name):
    # from_service_account_info must route through
    # service_account.Credentials.from_service_account_info and attach the
    # resulting credentials to the transport.
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # REST transports carry a scheme-prefixed host; gRPC carries host:port.
        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.PublicAdvertisedPrefixesRestTransport, "rest"),
])
def test_public_advertised_prefixes_client_service_account_always_use_jwt(transport_class, transport_name):
    # The transport must call with_always_use_jwt_access(True) only when the
    # always_use_jwt_access flag is set.
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (PublicAdvertisedPrefixesClient, "rest"),
])
def test_public_advertised_prefixes_client_from_service_account_file(client_class, transport_name):
    # Both from_service_account_file and its from_service_account_json alias
    # must route through service_account.Credentials.from_service_account_file.
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_public_advertised_prefixes_client_get_transport_class():
    # get_transport_class returns the REST transport both by default and when
    # asked for "rest" explicitly (REST is this client's only transport).
    transport = PublicAdvertisedPrefixesClient.get_transport_class()
    available_transports = [
        transports.PublicAdvertisedPrefixesRestTransport,
    ]
    assert transport in available_transports

    transport = PublicAdvertisedPrefixesClient.get_transport_class("rest")
    assert transport == transports.PublicAdvertisedPrefixesRestTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (PublicAdvertisedPrefixesClient, transports.PublicAdvertisedPrefixesRestTransport, "rest"),
])
@mock.patch.object(PublicAdvertisedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublicAdvertisedPrefixesClient))
def test_public_advertised_prefixes_client_client_options(client_class, transport_class, transport_name):
    # Exercises how ClientOptions and the GOOGLE_API_USE_MTLS_ENDPOINT env var
    # drive the transport constructor arguments.
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(PublicAdvertisedPrefixesClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(PublicAdvertisedPrefixesClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        # An explicit api_endpoint overrides the default host verbatim.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (PublicAdvertisedPrefixesClient, transports.PublicAdvertisedPrefixesRestTransport, "rest", "true"), + (PublicAdvertisedPrefixesClient, transports.PublicAdvertisedPrefixesRestTransport, "rest", "false"), +]) +@mock.patch.object(PublicAdvertisedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublicAdvertisedPrefixesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_public_advertised_prefixes_client_mtls_env_auto(client_class, transport_class, transport_name, 
use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + PublicAdvertisedPrefixesClient +]) +@mock.patch.object(PublicAdvertisedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublicAdvertisedPrefixesClient)) +def test_public_advertised_prefixes_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PublicAdvertisedPrefixesClient, transports.PublicAdvertisedPrefixesRestTransport, "rest"), +]) +def test_public_advertised_prefixes_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PublicAdvertisedPrefixesClient, transports.PublicAdvertisedPrefixesRestTransport, "rest", None), +]) +def test_public_advertised_prefixes_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePublicAdvertisedPrefixeRequest, + dict, +]) +def test_delete_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeletePublicAdvertisedPrefixeRequest): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_advertised_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["publicAdvertisedPrefix"] = 'public_advertised_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicAdvertisedPrefix" in jsonified_request + assert jsonified_request["publicAdvertisedPrefix"] == 'public_advertised_prefix_value' + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicAdvertisedPrefix", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeletePublicAdvertisedPrefixeRequest.pb(compute.DeletePublicAdvertisedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePublicAdvertisedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePublicAdvertisedPrefixeRequest): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_advertised_prefix='public_advertised_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeletePublicAdvertisedPrefixeRequest(), + project='project_value', + public_advertised_prefix='public_advertised_prefix_value', + ) + + +def test_delete_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePublicAdvertisedPrefixeRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + 
response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeletePublicAdvertisedPrefixeRequest): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_advertised_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicAdvertisedPrefix"] = 'public_advertised_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicAdvertisedPrefix" in jsonified_request + assert jsonified_request["publicAdvertisedPrefix"] == 'public_advertised_prefix_value' + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicAdvertisedPrefix", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeletePublicAdvertisedPrefixeRequest.pb(compute.DeletePublicAdvertisedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePublicAdvertisedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePublicAdvertisedPrefixeRequest): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_advertised_prefix='public_advertised_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeletePublicAdvertisedPrefixeRequest(), + project='project_value', + public_advertised_prefix='public_advertised_prefix_value', + ) + + +def test_delete_unary_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetPublicAdvertisedPrefixeRequest, + dict, +]) +def test_get_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.PublicAdvertisedPrefix( + creation_timestamp='creation_timestamp_value', + description='description_value', + dns_verification_ip='dns_verification_ip_value', + fingerprint='fingerprint_value', + id=205, + ip_cidr_range='ip_cidr_range_value', + kind='kind_value', + name='name_value', + self_link='self_link_value', + shared_secret='shared_secret_value', + status='status_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicAdvertisedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.PublicAdvertisedPrefix) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.dns_verification_ip == 'dns_verification_ip_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.ip_cidr_range == 'ip_cidr_range_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + assert response.shared_secret == 'shared_secret_value' + assert response.status == 'status_value' + + +def test_get_rest_required_fields(request_type=compute.GetPublicAdvertisedPrefixeRequest): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_advertised_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicAdvertisedPrefix"] = 'public_advertised_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicAdvertisedPrefix" in jsonified_request + assert jsonified_request["publicAdvertisedPrefix"] == 
'public_advertised_prefix_value' + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PublicAdvertisedPrefix() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PublicAdvertisedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "publicAdvertisedPrefix", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None 
if null_interceptor else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetPublicAdvertisedPrefixeRequest.pb(compute.GetPublicAdvertisedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicAdvertisedPrefix.to_json(compute.PublicAdvertisedPrefix()) + + request = compute.GetPublicAdvertisedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicAdvertisedPrefix() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetPublicAdvertisedPrefixeRequest): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.PublicAdvertisedPrefix() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_advertised_prefix='public_advertised_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicAdvertisedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetPublicAdvertisedPrefixeRequest(), + project='project_value', + public_advertised_prefix='public_advertised_prefix_value', + ) + + +def test_get_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertPublicAdvertisedPrefixeRequest, + dict, +]) +def test_insert_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["public_advertised_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dns_verification_ip': 'dns_verification_ip_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'kind': 'kind_value', 'name': 'name_value', 'public_delegated_prefixs': [{'ip_range': 'ip_range_value', 'name': 'name_value', 'project': 'project_value', 'region': 'region_value', 'status': 'status_value'}], 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertPublicAdvertisedPrefixeRequest): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicAdvertisedPrefixResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertPublicAdvertisedPrefixeRequest.pb(compute.InsertPublicAdvertisedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertPublicAdvertisedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertPublicAdvertisedPrefixeRequest): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["public_advertised_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dns_verification_ip': 'dns_verification_ip_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'kind': 'kind_value', 'name': 'name_value', 'public_delegated_prefixs': [{'ip_range': 'ip_range_value', 'name': 'name_value', 'project': 'project_value', 'region': 'region_value', 'status': 'status_value'}], 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertPublicAdvertisedPrefixeRequest(), + project='project_value', + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertPublicAdvertisedPrefixeRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["public_advertised_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dns_verification_ip': 'dns_verification_ip_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'kind': 'kind_value', 'name': 'name_value', 'public_delegated_prefixs': [{'ip_range': 'ip_range_value', 'name': 'name_value', 'project': 'project_value', 'region': 'region_value', 'status': 'status_value'}], 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertPublicAdvertisedPrefixeRequest): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicAdvertisedPrefixResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.InsertPublicAdvertisedPrefixeRequest.pb(compute.InsertPublicAdvertisedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertPublicAdvertisedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertPublicAdvertisedPrefixeRequest): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["public_advertised_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dns_verification_ip': 'dns_verification_ip_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'kind': 'kind_value', 'name': 'name_value', 'public_delegated_prefixs': [{'ip_range': 'ip_range_value', 'name': 'name_value', 'project': 'project_value', 'region': 'region_value', 'status': 'status_value'}], 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertPublicAdvertisedPrefixeRequest(), + project='project_value', + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListPublicAdvertisedPrefixesRequest, + dict, +]) +def test_list_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicAdvertisedPrefixList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListPublicAdvertisedPrefixesRequest): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PublicAdvertisedPrefixList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListPublicAdvertisedPrefixesRequest.pb(compute.ListPublicAdvertisedPrefixesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicAdvertisedPrefixList.to_json(compute.PublicAdvertisedPrefixList()) + + request = compute.ListPublicAdvertisedPrefixesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicAdvertisedPrefixList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListPublicAdvertisedPrefixesRequest): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicAdvertisedPrefixList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicAdvertisedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListPublicAdvertisedPrefixesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.PublicAdvertisedPrefixList( + items=[ + compute.PublicAdvertisedPrefix(), + compute.PublicAdvertisedPrefix(), + compute.PublicAdvertisedPrefix(), + ], + next_page_token='abc', + ), + compute.PublicAdvertisedPrefixList( + items=[], + next_page_token='def', + ), + compute.PublicAdvertisedPrefixList( + items=[ + compute.PublicAdvertisedPrefix(), + ], + next_page_token='ghi', + ), + compute.PublicAdvertisedPrefixList( + items=[ + compute.PublicAdvertisedPrefix(), + compute.PublicAdvertisedPrefix(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.PublicAdvertisedPrefixList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.PublicAdvertisedPrefix) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPublicAdvertisedPrefixeRequest, + dict, +]) +def test_patch_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request_init["public_advertised_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 
'description_value', 'dns_verification_ip': 'dns_verification_ip_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'kind': 'kind_value', 'name': 'name_value', 'public_delegated_prefixs': [{'ip_range': 'ip_range_value', 'name': 'name_value', 'project': 'project_value', 'region': 'region_value', 'status': 'status_value'}], 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchPublicAdvertisedPrefixeRequest): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_advertised_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["publicAdvertisedPrefix"] = 'public_advertised_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicAdvertisedPrefix" in jsonified_request + assert jsonified_request["publicAdvertisedPrefix"] == 'public_advertised_prefix_value' + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.PublicAdvertisedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicAdvertisedPrefix", "publicAdvertisedPrefixResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.PublicAdvertisedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicAdvertisedPrefixesRestInterceptor(), + ) + client = PublicAdvertisedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPublicAdvertisedPrefixeRequest.pb(compute.PatchPublicAdvertisedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPublicAdvertisedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPublicAdvertisedPrefixeRequest): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request_init["public_advertised_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dns_verification_ip': 'dns_verification_ip_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'kind': 'kind_value', 'name': 'name_value', 'public_delegated_prefixs': [{'ip_range': 'ip_range_value', 'name': 'name_value', 'project': 'project_value', 'region': 'region_value', 'status': 'status_value'}], 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + public_advertised_prefix='public_advertised_prefix_value', + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchPublicAdvertisedPrefixeRequest(), + project='project_value', + public_advertised_prefix='public_advertised_prefix_value', + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPublicAdvertisedPrefixeRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'} + request_init["public_advertised_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dns_verification_ip': 'dns_verification_ip_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'kind': 'kind_value', 'name': 'name_value', 'public_delegated_prefixs': [{'ip_range': 'ip_range_value', 'name': 'name_value', 'project': 'project_value', 'region': 'region_value', 'status': 'status_value'}], 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'status': 'status_value'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchPublicAdvertisedPrefixeRequest): + transport_class = transports.PublicAdvertisedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_advertised_prefix"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicAdvertisedPrefix"] = 'public_advertised_prefix_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicAdvertisedPrefix" in jsonified_request + assert jsonified_request["publicAdvertisedPrefix"] == 'public_advertised_prefix_value' + + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "patch",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.patch_unary(request)

            # With transcoding forced to put everything in query_params,
            # no extra params beyond the expected set should be sent.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_patch_unary_rest_unset_required_fields():
    """Verify patch reports exactly the required fields left unset, minus defaults."""
    # NOTE(review): the generated code passes the AnonymousCredentials class
    # itself (no parentheses) rather than an instance — confirm against the
    # generator template before changing.
    transport = transports.PublicAdvertisedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.patch._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicAdvertisedPrefix", "publicAdvertisedPrefixResource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_patch_unary_rest_interceptors(null_interceptor):
    """Verify pre_patch/post_patch interceptor hooks fire exactly once per call."""
    transport = transports.PublicAdvertisedPrefixesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.PublicAdvertisedPrefixesRestInterceptor(),
        )
    client = PublicAdvertisedPrefixesClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "post_patch") as post, \
         mock.patch.object(transports.PublicAdvertisedPrefixesRestInterceptor, "pre_patch") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.PatchPublicAdvertisedPrefixeRequest.pb(compute.PatchPublicAdvertisedPrefixeRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.PatchPublicAdvertisedPrefixeRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPublicAdvertisedPrefixeRequest):
    """Verify an HTTP 400 response surfaces as core_exceptions.BadRequest."""
    client = PublicAdvertisedPrefixesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'public_advertised_prefix': 'sample2'}
    request_init["public_advertised_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dns_verification_ip': 'dns_verification_ip_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'kind': 'kind_value', 'name': 'name_value', 'public_delegated_prefixs': [{'ip_range': 'ip_range_value', 'name': 'name_value', 'project': 'project_value', 'region': 'region_value', 'status': 'status_value'}], 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'status': 'status_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.patch_unary(request)


def test_patch_unary_rest_flattened():
    """Verify flattened keyword args are transcoded onto the expected URI."""
    client = PublicAdvertisedPrefixesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'public_advertised_prefix': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            public_advertised_prefix='public_advertised_prefix_value',
            public_advertised_prefix_resource=compute.PublicAdvertisedPrefix(creation_timestamp='creation_timestamp_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.patch_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" % client.transport._host, args[1])


def test_patch_unary_rest_flattened_error(transport: str = 'rest'):
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = PublicAdvertisedPrefixesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.patch_unary(
            compute.PatchPublicAdvertisedPrefixeRequest(),
            project='project_value',
            public_advertised_prefix='public_advertised_prefix_value',
            public_advertised_prefix_resource=compute.PublicAdvertisedPrefix(creation_timestamp='creation_timestamp_value'),
        )


def test_patch_unary_rest_error():
    # Smoke test: client construction over REST must not raise.
    client = PublicAdvertisedPrefixesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


def test_credentials_transport_error():
    """Verify mutually-exclusive client constructor arguments raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.PublicAdvertisedPrefixesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = PublicAdvertisedPrefixesClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.PublicAdvertisedPrefixesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = PublicAdvertisedPrefixesClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.PublicAdvertisedPrefixesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = PublicAdvertisedPrefixesClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = PublicAdvertisedPrefixesClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.PublicAdvertisedPrefixesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = PublicAdvertisedPrefixesClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    # A client may be instantiated with a custom transport instance.
    transport = transports.PublicAdvertisedPrefixesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = PublicAdvertisedPrefixesClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.PublicAdvertisedPrefixesRestTransport,
])
def test_transport_adc(transport_class):
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """Verify transport.kind matches the name it was looked up by."""
    transport = PublicAdvertisedPrefixesClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_public_advertised_prefixes_base_transport_error():
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.PublicAdvertisedPrefixesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_public_advertised_prefixes_base_transport():
    """Verify every abstract method/property on the base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.compute_v1.services.public_advertised_prefixes.transports.PublicAdvertisedPrefixesTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.PublicAdvertisedPrefixesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'delete',
        'get',
        'insert',
        'list',
        'patch',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_public_advertised_prefixes_base_transport_with_credentials_file():
    """Verify a credentials file is loaded with the service's default scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.public_advertised_prefixes.transports.PublicAdvertisedPrefixesTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.PublicAdvertisedPrefixesTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/compute',
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id="octopus",
        )


def test_public_advertised_prefixes_base_transport_with_adc():
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.public_advertised_prefixes.transports.PublicAdvertisedPrefixesTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.PublicAdvertisedPrefixesTransport()
        adc.assert_called_once()


def test_public_advertised_prefixes_auth_adc():
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        PublicAdvertisedPrefixesClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/compute',
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id=None,
        )


def test_public_advertised_prefixes_http_transport_client_cert_source_for_mtls():
    """Verify the client cert source callback is wired into the mTLS channel."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.PublicAdvertisedPrefixesRestTransport (
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_public_advertised_prefixes_host_no_port(transport_name):
    """Verify the default host when the endpoint override carries no port."""
    client = PublicAdvertisedPrefixesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_public_advertised_prefixes_host_with_port(transport_name):
    """Verify an explicit port on the endpoint override is preserved."""
    client = PublicAdvertisedPrefixesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com:8000'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_public_advertised_prefixes_client_transport_session_collision(transport_name):
    """Verify two clients never share an HTTP session for any RPC."""
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = PublicAdvertisedPrefixesClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = PublicAdvertisedPrefixesClient(
        credentials=creds2,
        transport=transport_name,
    )
    session1 = client1.transport.delete._session
    session2 = client2.transport.delete._session
    assert session1 != session2
    session1 = client1.transport.get._session
    session2 = client2.transport.get._session
    assert session1 != session2
    session1 = client1.transport.insert._session
    session2 = client2.transport.insert._session
    assert session1 != session2
    session1 = client1.transport.list._session
    session2 = client2.transport.list._session
    assert session1 != session2
    session1 = client1.transport.patch._session
    session2 = client2.transport.patch._session
    assert session1 != session2

def test_common_billing_account_path():
    billing_account = "squid"
    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
    actual = PublicAdvertisedPrefixesClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    expected = {
        "billing_account": "clam",
    }
    path = PublicAdvertisedPrefixesClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = PublicAdvertisedPrefixesClient.parse_common_billing_account_path(path)
    assert expected == actual

def test_common_folder_path():
    folder = "whelk"
    expected = "folders/{folder}".format(folder=folder, )
    actual = PublicAdvertisedPrefixesClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    expected = {
        "folder": "octopus",
    }
    path = PublicAdvertisedPrefixesClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = PublicAdvertisedPrefixesClient.parse_common_folder_path(path)
    assert expected == actual

def test_common_organization_path():
    organization = "oyster"
    expected = "organizations/{organization}".format(organization=organization, )
    actual = PublicAdvertisedPrefixesClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    expected = {
        "organization": "nudibranch",
    }
    path = PublicAdvertisedPrefixesClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = PublicAdvertisedPrefixesClient.parse_common_organization_path(path)
    assert expected == actual

def test_common_project_path():
    project = "cuttlefish"
    expected = "projects/{project}".format(project=project, )
    actual = PublicAdvertisedPrefixesClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    expected = {
        "project": "mussel",
    }
    path = PublicAdvertisedPrefixesClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = PublicAdvertisedPrefixesClient.parse_common_project_path(path)
    assert expected == actual

def test_common_location_path():
    project = "winkle"
    location = "nautilus"
    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
    actual = PublicAdvertisedPrefixesClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = PublicAdvertisedPrefixesClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = PublicAdvertisedPrefixesClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    """Verify a caller-supplied ClientInfo reaches _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.PublicAdvertisedPrefixesTransport, '_prep_wrapped_messages') as prep:
        client = PublicAdvertisedPrefixesClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.PublicAdvertisedPrefixesTransport, '_prep_wrapped_messages') as prep:
        transport_class = PublicAdvertisedPrefixesClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    """Verify exiting the client context manager closes the transport session."""
    transports = {
        "rest": "_session",
    }

    for transport, close_name in transports.items():
        client = PublicAdvertisedPrefixesClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()

def test_client_ctx():
    transports = [
        'rest',
    ]
    for transport in transports:
        client = PublicAdvertisedPrefixesClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()

@pytest.mark.parametrize("client_class,transport_class", [
    (PublicAdvertisedPrefixesClient, transports.PublicAdvertisedPrefixesRestTransport),
])
def test_api_key_credentials(client_class, transport_class):
    """Verify an api_key option is converted into API-key credentials for the transport."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py
new file mode 100644
index 000000000..ca6a1786e
--- /dev/null
+++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py
@@ -0,0 +1,3396 @@
# -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
from google.protobuf import json_format

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.public_delegated_prefixes import PublicDelegatedPrefixesClient
from google.cloud.compute_v1.services.public_delegated_prefixes import pagers
from google.cloud.compute_v1.services.public_delegated_prefixes import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    # Stand-in client certificate callback used by the mTLS tests below.
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    """Verify mTLS endpoint derivation for googleapis and non-googleapis hosts."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert PublicDelegatedPrefixesClient._get_default_mtls_endpoint(None) is None
    assert PublicDelegatedPrefixesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert PublicDelegatedPrefixesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert PublicDelegatedPrefixesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert PublicDelegatedPrefixesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert PublicDelegatedPrefixesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (PublicDelegatedPrefixesClient, "rest"),
])
def test_public_delegated_prefixes_client_from_service_account_info(client_class, transport_name):
    """Verify from_service_account_info wires credentials and the default host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.PublicDelegatedPrefixesRestTransport, "rest"),
])
def test_public_delegated_prefixes_client_service_account_always_use_jwt(transport_class, transport_name):
    """Verify always_use_jwt_access toggles with_always_use_jwt_access on the creds."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (PublicDelegatedPrefixesClient, "rest"),
])
def test_public_delegated_prefixes_client_from_service_account_file(client_class, transport_name):
    """Verify from_service_account_file/json wire credentials and the default host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_public_delegated_prefixes_client_get_transport_class():
    """Verify transport lookup by default and by explicit name."""
    transport = PublicDelegatedPrefixesClient.get_transport_class()
    available_transports = [
        transports.PublicDelegatedPrefixesRestTransport,
    ]
    assert transport in available_transports

    transport = PublicDelegatedPrefixesClient.get_transport_class("rest")
    assert transport == transports.PublicDelegatedPrefixesRestTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (PublicDelegatedPrefixesClient, transports.PublicDelegatedPrefixesRestTransport, "rest"),
])
@mock.patch.object(PublicDelegatedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublicDelegatedPrefixesClient))
def test_public_delegated_prefixes_client_client_options(client_class, transport_class, transport_name):
    """Verify client_options handling: explicit transport, endpoint, and env vars."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(PublicDelegatedPrefixesClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(PublicDelegatedPrefixesClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_endpoint is provided
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (PublicDelegatedPrefixesClient, transports.PublicDelegatedPrefixesRestTransport, "rest", "true"),
    (PublicDelegatedPrefixesClient, transports.PublicDelegatedPrefixesRestTransport, "rest", "false"),
])
@mock.patch.object(PublicDelegatedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublicDelegatedPrefixesClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_public_delegated_prefixes_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """Verify auto endpoint switching under GOOGLE_API_USE_CLIENT_CERTIFICATE."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + PublicDelegatedPrefixesClient +]) +@mock.patch.object(PublicDelegatedPrefixesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublicDelegatedPrefixesClient)) +def test_public_delegated_prefixes_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PublicDelegatedPrefixesClient, transports.PublicDelegatedPrefixesRestTransport, "rest"), +]) +def test_public_delegated_prefixes_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PublicDelegatedPrefixesClient, transports.PublicDelegatedPrefixesRestTransport, "rest", None), +]) +def test_public_delegated_prefixes_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListPublicDelegatedPrefixesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.PublicDelegatedPrefixAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefixAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListPublicDelegatedPrefixesRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefixAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PublicDelegatedPrefixAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = 
PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListPublicDelegatedPrefixesRequest.pb(compute.AggregatedListPublicDelegatedPrefixesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicDelegatedPrefixAggregatedList.to_json(compute.PublicDelegatedPrefixAggregatedList()) + + request = compute.AggregatedListPublicDelegatedPrefixesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicDelegatedPrefixAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListPublicDelegatedPrefixesRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.PublicDelegatedPrefixAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefixAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/publicDelegatedPrefixes" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListPublicDelegatedPrefixesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.PublicDelegatedPrefixAggregatedList( + items={ + 'a':compute.PublicDelegatedPrefixesScopedList(), + 'b':compute.PublicDelegatedPrefixesScopedList(), + 'c':compute.PublicDelegatedPrefixesScopedList(), + }, + next_page_token='abc', + ), + compute.PublicDelegatedPrefixAggregatedList( + items={}, + next_page_token='def', + ), + compute.PublicDelegatedPrefixAggregatedList( + items={ + 'g':compute.PublicDelegatedPrefixesScopedList(), + }, + next_page_token='ghi', + ), + compute.PublicDelegatedPrefixAggregatedList( + items={ + 'h':compute.PublicDelegatedPrefixesScopedList(), + 'i':compute.PublicDelegatedPrefixesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.PublicDelegatedPrefixAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.PublicDelegatedPrefixesScopedList) + assert pager.get('h') is None + + results = list(pager) + 
assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.PublicDelegatedPrefixesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.PublicDelegatedPrefixesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePublicDelegatedPrefixeRequest, + dict, +]) +def test_delete_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeletePublicDelegatedPrefixeRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefix", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeletePublicDelegatedPrefixeRequest.pb(compute.DeletePublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePublicDelegatedPrefixeRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeletePublicDelegatedPrefixeRequest(), + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + + +def test_delete_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePublicDelegatedPrefixeRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeletePublicDelegatedPrefixeRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefix", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeletePublicDelegatedPrefixeRequest.pb(compute.DeletePublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePublicDelegatedPrefixeRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeletePublicDelegatedPrefixeRequest(), + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + + +def test_delete_unary_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetPublicDelegatedPrefixeRequest, + dict, +]) +def test_get_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefix( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + ip_cidr_range='ip_cidr_range_value', + is_live_migration=True, + kind='kind_value', + name='name_value', + parent_prefix='parent_prefix_value', + region='region_value', + self_link='self_link_value', + status='status_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.PublicDelegatedPrefix) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.ip_cidr_range == 'ip_cidr_range_value' + assert response.is_live_migration is True + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.parent_prefix == 'parent_prefix_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + + +def test_get_rest_required_fields(request_type=compute.GetPublicDelegatedPrefixeRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False 
+ )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PublicDelegatedPrefix() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "publicDelegatedPrefix", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetPublicDelegatedPrefixeRequest.pb(compute.GetPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() 
+ req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicDelegatedPrefix.to_json(compute.PublicDelegatedPrefix()) + + request = compute.GetPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicDelegatedPrefix() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetPublicDelegatedPrefixeRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefix() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefix.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetPublicDelegatedPrefixeRequest(), + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + ) + + +def test_get_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertPublicDelegatedPrefixeRequest, + dict, +]) +def test_insert_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertPublicDelegatedPrefixeRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefixResource", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertPublicDelegatedPrefixeRequest.pb(compute.InsertPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertPublicDelegatedPrefixeRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertPublicDelegatedPrefixeRequest(), + project='project_value', + region='region_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertPublicDelegatedPrefixeRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertPublicDelegatedPrefixeRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefixResource", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertPublicDelegatedPrefixeRequest.pb(compute.InsertPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertPublicDelegatedPrefixeRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 
'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertPublicDelegatedPrefixeRequest(), + project='project_value', + region='region_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListPublicDelegatedPrefixesRequest, + dict, +]) +def test_list_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefixList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListPublicDelegatedPrefixesRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.PublicDelegatedPrefixList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListPublicDelegatedPrefixesRequest.pb(compute.ListPublicDelegatedPrefixesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.PublicDelegatedPrefixList.to_json(compute.PublicDelegatedPrefixList()) + + request = compute.ListPublicDelegatedPrefixesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.PublicDelegatedPrefixList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListPublicDelegatedPrefixesRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.PublicDelegatedPrefixList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.PublicDelegatedPrefixList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListPublicDelegatedPrefixesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + ], + next_page_token='abc', + ), + compute.PublicDelegatedPrefixList( + items=[], + next_page_token='def', + ), + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + ], + next_page_token='ghi', + ), + compute.PublicDelegatedPrefixList( + items=[ + compute.PublicDelegatedPrefix(), + compute.PublicDelegatedPrefix(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.PublicDelegatedPrefixList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.PublicDelegatedPrefix) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPublicDelegatedPrefixeRequest, + dict, +]) +def test_patch_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 
'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchPublicDelegatedPrefixeRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefix", "publicDelegatedPrefixResource", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPublicDelegatedPrefixeRequest.pb(compute.PatchPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPublicDelegatedPrefixeRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchPublicDelegatedPrefixeRequest(), + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPublicDelegatedPrefixeRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 
'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchPublicDelegatedPrefixeRequest): + transport_class = transports.PublicDelegatedPrefixesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["public_delegated_prefix"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["publicDelegatedPrefix"] = 'public_delegated_prefix_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "publicDelegatedPrefix" in jsonified_request + assert jsonified_request["publicDelegatedPrefix"] == 'public_delegated_prefix_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.PublicDelegatedPrefixesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "publicDelegatedPrefix", "publicDelegatedPrefixResource", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublicDelegatedPrefixesRestInterceptor(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.PublicDelegatedPrefixesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchPublicDelegatedPrefixeRequest.pb(compute.PatchPublicDelegatedPrefixeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPublicDelegatedPrefixeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPublicDelegatedPrefixeRequest): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + request_init["public_delegated_prefix_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'ip_cidr_range': 'ip_cidr_range_value', 'is_live_migration': True, 'kind': 'kind_value', 'name': 'name_value', 'parent_prefix': 'parent_prefix_value', 'public_delegated_sub_prefixs': [{'delegatee_project': 'delegatee_project_value', 'description': 
'description_value', 'ip_cidr_range': 'ip_cidr_range_value', 'is_address': True, 'name': 'name_value', 'region': 'region_value', 'status': 'status_value'}], 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'public_delegated_prefix': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchPublicDelegatedPrefixeRequest(), + project='project_value', + region='region_value', + public_delegated_prefix='public_delegated_prefix_value', + public_delegated_prefix_resource=compute.PublicDelegatedPrefix(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublicDelegatedPrefixesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublicDelegatedPrefixesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublicDelegatedPrefixesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublicDelegatedPrefixesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublicDelegatedPrefixesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PublicDelegatedPrefixesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.PublicDelegatedPrefixesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = PublicDelegatedPrefixesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_public_delegated_prefixes_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PublicDelegatedPrefixesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_public_delegated_prefixes_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.compute_v1.services.public_delegated_prefixes.transports.PublicDelegatedPrefixesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.PublicDelegatedPrefixesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'patch', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_public_delegated_prefixes_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.public_delegated_prefixes.transports.PublicDelegatedPrefixesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PublicDelegatedPrefixesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_public_delegated_prefixes_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.public_delegated_prefixes.transports.PublicDelegatedPrefixesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PublicDelegatedPrefixesTransport() + adc.assert_called_once() + + +def test_public_delegated_prefixes_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PublicDelegatedPrefixesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_public_delegated_prefixes_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.PublicDelegatedPrefixesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_public_delegated_prefixes_host_no_port(transport_name): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_public_delegated_prefixes_host_with_port(transport_name): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_public_delegated_prefixes_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PublicDelegatedPrefixesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PublicDelegatedPrefixesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = PublicDelegatedPrefixesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + 
"billing_account": "clam", + } + path = PublicDelegatedPrefixesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PublicDelegatedPrefixesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = PublicDelegatedPrefixesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = PublicDelegatedPrefixesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PublicDelegatedPrefixesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = PublicDelegatedPrefixesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = PublicDelegatedPrefixesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PublicDelegatedPrefixesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = PublicDelegatedPrefixesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = PublicDelegatedPrefixesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublicDelegatedPrefixesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = PublicDelegatedPrefixesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = PublicDelegatedPrefixesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PublicDelegatedPrefixesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.PublicDelegatedPrefixesTransport, '_prep_wrapped_messages') as prep: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.PublicDelegatedPrefixesTransport, '_prep_wrapped_messages') as prep: + transport_class = PublicDelegatedPrefixesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = PublicDelegatedPrefixesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (PublicDelegatedPrefixesClient, transports.PublicDelegatedPrefixesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_autoscalers.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_autoscalers.py new file mode 100644 index 000000000..fdfcd4262 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_autoscalers.py @@ -0,0 +1,3627 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_autoscalers import RegionAutoscalersClient +from google.cloud.compute_v1.services.region_autoscalers import pagers +from google.cloud.compute_v1.services.region_autoscalers import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If 
default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionAutoscalersClient._get_default_mtls_endpoint(None) is None + assert RegionAutoscalersClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionAutoscalersClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionAutoscalersClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionAutoscalersClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionAutoscalersClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionAutoscalersClient, "rest"), +]) +def test_region_autoscalers_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + 
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionAutoscalersRestTransport, "rest"), +]) +def test_region_autoscalers_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionAutoscalersClient, "rest"), +]) +def test_region_autoscalers_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_autoscalers_client_get_transport_class(): + transport = RegionAutoscalersClient.get_transport_class() + available_transports = [ + transports.RegionAutoscalersRestTransport, + ] + assert transport in available_transports + + transport = 
RegionAutoscalersClient.get_transport_class("rest") + assert transport == transports.RegionAutoscalersRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionAutoscalersClient, transports.RegionAutoscalersRestTransport, "rest"), +]) +@mock.patch.object(RegionAutoscalersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionAutoscalersClient)) +def test_region_autoscalers_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionAutoscalersClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionAutoscalersClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionAutoscalersClient, transports.RegionAutoscalersRestTransport, "rest", "true"), + (RegionAutoscalersClient, transports.RegionAutoscalersRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionAutoscalersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionAutoscalersClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_autoscalers_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionAutoscalersClient +]) +@mock.patch.object(RegionAutoscalersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionAutoscalersClient)) +def test_region_autoscalers_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionAutoscalersClient, transports.RegionAutoscalersRestTransport, "rest"), +]) +def test_region_autoscalers_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionAutoscalersClient, transports.RegionAutoscalersRestTransport, "rest", None), +]) +def test_region_autoscalers_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionAutoscalerRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["autoscaler"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["autoscaler"] = 'autoscaler_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "autoscaler" in jsonified_request + assert jsonified_request["autoscaler"] == 'autoscaler_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("autoscaler", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionAutoscalerRequest.pb(compute.DeleteRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code 
= 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler='autoscaler_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler='autoscaler_value', + ) + + +def test_delete_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionAutoscalerRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["autoscaler"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["autoscaler"] = 'autoscaler_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "autoscaler" in jsonified_request + assert jsonified_request["autoscaler"] == 'autoscaler_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("autoscaler", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionAutoscalerRequest.pb(compute.DeleteRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler='autoscaler_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler='autoscaler_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionAutoscalerRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Autoscaler( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + recommended_size=1693, + region='region_value', + self_link='self_link_value', + status='status_value', + target='target_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Autoscaler) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.recommended_size == 1693 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + assert response.target == 'target_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["autoscaler"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["autoscaler"] = 'autoscaler_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "autoscaler" in jsonified_request + assert jsonified_request["autoscaler"] == 'autoscaler_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert 
jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Autoscaler() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("autoscaler", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionAutoscalerRequest.pb(compute.GetRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Autoscaler.to_json(compute.Autoscaler()) + + request = compute.GetRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Autoscaler() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Autoscaler() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'autoscaler': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler='autoscaler_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Autoscaler.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler='autoscaler_value', + ) + + +def test_get_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionAutoscalerRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("autoscalerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionAutoscalerRequest.pb(compute.InsertRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 
'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_insert_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionAutoscalerRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 
'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("autoscalerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionAutoscalerRequest.pb(compute.InsertRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 
'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_insert_unary_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionAutoscalersRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionAutoscalerList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionAutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionAutoscalersRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.RegionAutoscalerList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionAutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionAutoscalersRequest.pb(compute.ListRegionAutoscalersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionAutoscalerList.to_json(compute.RegionAutoscalerList()) + + request = compute.ListRegionAutoscalersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionAutoscalerList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionAutoscalersRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionAutoscalerList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionAutoscalerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListRegionAutoscalersRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionAutoscalerList( + items=[ + compute.Autoscaler(), + compute.Autoscaler(), + compute.Autoscaler(), + ], + next_page_token='abc', + ), + compute.RegionAutoscalerList( + items=[], + next_page_token='def', + ), + compute.RegionAutoscalerList( + items=[ + compute.Autoscaler(), + ], + next_page_token='ghi', + ), + compute.RegionAutoscalerList( + items=[ + compute.Autoscaler(), + compute.Autoscaler(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RegionAutoscalerList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Autoscaler) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionAutoscalerRequest, + dict, +]) 
+def test_patch_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("autoscaler", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autoscaler", "requestId", )) & set(("autoscalerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionAutoscalerRequest.pb(compute.PatchRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 
'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_patch_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionAutoscalerRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 
'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("autoscaler", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autoscaler", "requestId", )) & set(("autoscalerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionAutoscalerRequest.pb(compute.PatchRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 
'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_patch_unary_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionAutoscalerRequest, + dict, +]) +def test_update_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("autoscaler", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autoscaler", "requestId", )) & set(("autoscalerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionAutoscalerRequest.pb(compute.UpdateRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 
'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update( + compute.UpdateRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_update_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionAutoscalerRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 
'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateRegionAutoscalerRequest): + transport_class = transports.RegionAutoscalersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("autoscaler", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.RegionAutoscalersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("autoscaler", "requestId", )) & set(("autoscalerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionAutoscalersRestInterceptor(), + ) + client = RegionAutoscalersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionAutoscalersRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionAutoscalersRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionAutoscalerRequest.pb(compute.UpdateRegionAutoscalerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionAutoscalerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionAutoscalerRequest): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["autoscaler_resource"] = {'autoscaling_policy': {'cool_down_period_sec': 2112, 'cpu_utilization': {'predictive_method': 'predictive_method_value', 'utilization_target': 0.19540000000000002}, 'custom_metric_utilizations': [{'filter': 'filter_value', 'metric': 'metric_value', 'single_instance_assignment': 0.2766, 'utilization_target': 0.19540000000000002, 'utilization_target_type': 'utilization_target_type_value'}], 'load_balancing_utilization': {'utilization_target': 0.19540000000000002}, 'max_num_replicas': 1703, 'min_num_replicas': 1701, 'mode': 'mode_value', 'scale_in_control': {'max_scaled_in_replicas': {'calculated': 1042, 'fixed': 528, 
'percent': 753}, 'time_window_sec': 1600}, 'scaling_schedules': {}}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'recommended_size': 1693, 'region': 'region_value', 'scaling_schedule_status': {}, 'self_link': 'self_link_value', 'status': 'status_value', 'status_details': [{'message': 'message_value', 'type_': 'type__value'}], 'target': 'target_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/autoscalers" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_unary( + compute.UpdateRegionAutoscalerRequest(), + project='project_value', + region='region_value', + autoscaler_resource=compute.Autoscaler(autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112)), + ) + + +def test_update_unary_rest_error(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionAutoscalersClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionAutoscalersClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionAutoscalersClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionAutoscalersClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RegionAutoscalersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionAutoscalersClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionAutoscalersRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionAutoscalersClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_autoscalers_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionAutoscalersTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_autoscalers_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_autoscalers.transports.RegionAutoscalersTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionAutoscalersTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_autoscalers_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_autoscalers.transports.RegionAutoscalersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionAutoscalersTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_autoscalers_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_autoscalers.transports.RegionAutoscalersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionAutoscalersTransport() + adc.assert_called_once() + + +def test_region_autoscalers_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionAutoscalersClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_autoscalers_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionAutoscalersRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_autoscalers_host_no_port(transport_name): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_autoscalers_host_with_port(transport_name): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_autoscalers_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionAutoscalersClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionAutoscalersClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionAutoscalersClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionAutoscalersClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionAutoscalersClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionAutoscalersClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionAutoscalersClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionAutoscalersClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionAutoscalersClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionAutoscalersClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionAutoscalersClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionAutoscalersClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionAutoscalersClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionAutoscalersClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionAutoscalersClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionAutoscalersClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionAutoscalersClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionAutoscalersTransport, '_prep_wrapped_messages') as prep: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionAutoscalersTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionAutoscalersClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionAutoscalersClient, transports.RegionAutoscalersRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_backend_services.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_backend_services.py new file mode 100644 index 000000000..6429f997c --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_backend_services.py @@ -0,0 +1,4443 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_backend_services import RegionBackendServicesClient +from google.cloud.compute_v1.services.region_backend_services import pagers +from google.cloud.compute_v1.services.region_backend_services import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionBackendServicesClient._get_default_mtls_endpoint(None) is None + assert RegionBackendServicesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionBackendServicesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionBackendServicesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionBackendServicesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionBackendServicesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionBackendServicesClient, "rest"), +]) +def test_region_backend_services_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionBackendServicesRestTransport, "rest"), +]) +def test_region_backend_services_client_service_account_always_use_jwt(transport_class, 
transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionBackendServicesClient, "rest"), +]) +def test_region_backend_services_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_backend_services_client_get_transport_class(): + transport = RegionBackendServicesClient.get_transport_class() + available_transports = [ + transports.RegionBackendServicesRestTransport, + ] + assert transport in available_transports + + transport = RegionBackendServicesClient.get_transport_class("rest") + assert transport == transports.RegionBackendServicesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + 
(RegionBackendServicesClient, transports.RegionBackendServicesRestTransport, "rest"), +]) +@mock.patch.object(RegionBackendServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionBackendServicesClient)) +def test_region_backend_services_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionBackendServicesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionBackendServicesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionBackendServicesClient, transports.RegionBackendServicesRestTransport, "rest", "true"), + (RegionBackendServicesClient, transports.RegionBackendServicesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionBackendServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionBackendServicesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_backend_services_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): 
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionBackendServicesClient +]) +@mock.patch.object(RegionBackendServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionBackendServicesClient)) +def test_region_backend_services_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionBackendServicesClient, transports.RegionBackendServicesRestTransport, "rest"), +]) +def test_region_backend_services_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionBackendServicesClient, transports.RegionBackendServicesRestTransport, "rest", None), +]) +def test_region_backend_services_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionBackendServiceRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionBackendServiceRequest.pb(compute.DeleteRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service='backend_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service='backend_service_value', + ) + + +def test_delete_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionBackendServiceRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionBackendServiceRequest.pb(compute.DeleteRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service='backend_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service='backend_service_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionBackendServiceRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendService( + affinity_cookie_ttl_sec=2432, + compression_mode='compression_mode_value', + creation_timestamp='creation_timestamp_value', + custom_request_headers=['custom_request_headers_value'], + custom_response_headers=['custom_response_headers_value'], + description='description_value', + edge_security_policy='edge_security_policy_value', + enable_c_d_n=True, + fingerprint='fingerprint_value', + health_checks=['health_checks_value'], + id=205, + kind='kind_value', + load_balancing_scheme='load_balancing_scheme_value', + locality_lb_policy='locality_lb_policy_value', + name='name_value', + network='network_value', + port=453, + port_name='port_name_value', + protocol='protocol_value', + region='region_value', + security_policy='security_policy_value', + self_link='self_link_value', + service_bindings=['service_bindings_value'], + session_affinity='session_affinity_value', + timeout_sec=1185, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.BackendService) + assert response.affinity_cookie_ttl_sec == 2432 + assert response.compression_mode == 'compression_mode_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.custom_request_headers == ['custom_request_headers_value'] + assert response.custom_response_headers == ['custom_response_headers_value'] + assert response.description == 'description_value' + assert response.edge_security_policy == 'edge_security_policy_value' + assert response.enable_c_d_n is True + assert response.fingerprint == 'fingerprint_value' + assert response.health_checks == ['health_checks_value'] + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.load_balancing_scheme == 'load_balancing_scheme_value' + assert response.locality_lb_policy == 'locality_lb_policy_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.port == 453 + assert response.port_name == 'port_name_value' + assert response.protocol == 'protocol_value' + assert response.region == 'region_value' + assert response.security_policy == 'security_policy_value' + assert response.self_link == 'self_link_value' + assert response.service_bindings == ['service_bindings_value'] + assert response.session_affinity == 'session_affinity_value' + assert response.timeout_sec == 1185 + + +def test_get_rest_required_fields(request_type=compute.GetRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendService() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("backendService", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionBackendServiceRequest.pb(compute.GetRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code 
= 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.BackendService.to_json(compute.BackendService()) + + request = compute.GetRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendService() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendService() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service='backend_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service='backend_service_value', + ) + + +def test_get_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetHealthRegionBackendServiceRequest, + dict, +]) +def test_get_health_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["resource_group_reference_resource"] = {'group': 'group_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendServiceGroupHealth( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_health(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.BackendServiceGroupHealth) + assert response.kind == 'kind_value' + + +def test_get_health_rest_required_fields(request_type=compute.GetHealthRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendServiceGroupHealth() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_health(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_health_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_health._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("backendService", "project", "region", "resourceGroupReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_health_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_get_health") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_get_health") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetHealthRegionBackendServiceRequest.pb(compute.GetHealthRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.BackendServiceGroupHealth.to_json(compute.BackendServiceGroupHealth()) + + request = compute.GetHealthRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendServiceGroupHealth() + + client.get_health(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_health_rest_bad_request(transport: str = 'rest', request_type=compute.GetHealthRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["resource_group_reference_resource"] = {'group': 'group_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_health(request) + + +def test_get_health_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendServiceGroupHealth() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service='backend_service_value', + resource_group_reference_resource=compute.ResourceGroupReference(group='group_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendServiceGroupHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_health(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/getHealth" % client.transport._host, args[1]) + + +def test_get_health_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_health( + compute.GetHealthRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service='backend_service_value', + resource_group_reference_resource=compute.ResourceGroupReference(group='group_value'), + ) + + +def test_get_health_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyRegionBackendServiceRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "region", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyRegionBackendServiceRequest.pb(compute.GetIamPolicyRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyRegionBackendServiceRequest(), + project='project_value', + region='region_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionBackendServiceRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 
'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 
'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionBackendServiceRequest.pb(compute.InsertRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 
'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 
'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_insert_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionBackendServiceRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 
'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 
'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionBackendServiceRequest.pb(compute.InsertRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': 
['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 
'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_insert_unary_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionBackendServicesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendServiceList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionBackendServicesRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendServiceList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionBackendServicesRequest.pb(compute.ListRegionBackendServicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.BackendServiceList.to_json(compute.BackendServiceList()) + + request = compute.ListRegionBackendServicesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendServiceList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionBackendServicesRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendServiceList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.BackendServiceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionBackendServicesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendServiceList( + items=[ + compute.BackendService(), + compute.BackendService(), + compute.BackendService(), + ], + next_page_token='abc', + ), + compute.BackendServiceList( + items=[], + next_page_token='def', + ), + compute.BackendServiceList( + items=[ + compute.BackendService(), + ], + next_page_token='ghi', + ), + compute.BackendServiceList( + items=[ + compute.BackendService(), + compute.BackendService(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.BackendServiceList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.BackendService) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionBackendServiceRequest, + dict, +]) +def test_patch_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 
'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 
'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 
'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "backendServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionBackendServiceRequest.pb(compute.PatchRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 
'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 
'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_patch_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionBackendServiceRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 
'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 
'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "backendServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionBackendServiceRequest.pb(compute.PatchRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 
0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': 
{'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_patch_unary_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyRegionBackendServiceRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 
'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "regionSetPolicyRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyRegionBackendServiceRequest.pb(compute.SetIamPolicyRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 
'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyRegionBackendServiceRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionBackendServiceRequest, + dict, +]) +def test_update_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 
'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 
'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "backendServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionBackendServiceRequest.pb(compute.UpdateRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 
'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 
'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            region='region_value',
+            backend_service='backend_service_value',
+            backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432),
+        )
+        # Path-template values from sample_request override the flattened
+        # values so the transcoded URI matches the http rule below.
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        # Call via the flattened-arguments convenience path (no request object).
+        client.update(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        # args[1] is the request URL; validate it against the method's http rule.
+        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" % client.transport._host, args[1])
+
+
+def test_update_rest_flattened_error(transport: str = 'rest'):
+    client = RegionBackendServicesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.update( + compute.UpdateRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_update_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionBackendServiceRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 
'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': {'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 
'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateRegionBackendServiceRequest): + transport_class = transports.RegionBackendServicesRestTransport + + request_init = {} + request_init["backend_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendService"] = 'backend_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendService" in jsonified_request + assert jsonified_request["backendService"] == 'backend_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.RegionBackendServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("backendService", "backendServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionBackendServicesRestInterceptor(), + ) + client = RegionBackendServicesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionBackendServicesRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionBackendServiceRequest.pb(compute.UpdateRegionBackendServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionBackendServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionBackendServiceRequest): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + request_init["backend_service_resource"] = {'affinity_cookie_ttl_sec': 2432, 'backends': [{'balancing_mode': 'balancing_mode_value', 'capacity_scaler': 0.1575, 'description': 'description_value', 'failover': True, 'group': 'group_value', 'max_connections': 1608, 'max_connections_per_endpoint': 2990, 'max_connections_per_instance': 2978, 'max_rate': 849, 'max_rate_per_endpoint': 0.22310000000000002, 'max_rate_per_instance': 0.22190000000000001, 
'max_utilization': 0.1633}], 'cdn_policy': {'bypass_cache_on_request_headers': [{'header_name': 'header_name_value'}], 'cache_key_policy': {'include_host': True, 'include_http_headers': ['include_http_headers_value1', 'include_http_headers_value2'], 'include_named_cookies': ['include_named_cookies_value1', 'include_named_cookies_value2'], 'include_protocol': True, 'include_query_string': True, 'query_string_blacklist': ['query_string_blacklist_value1', 'query_string_blacklist_value2'], 'query_string_whitelist': ['query_string_whitelist_value1', 'query_string_whitelist_value2']}, 'cache_mode': 'cache_mode_value', 'client_ttl': 1074, 'default_ttl': 1176, 'max_ttl': 761, 'negative_caching': True, 'negative_caching_policy': [{'code': 411, 'ttl': 340}], 'request_coalescing': True, 'serve_while_stale': 1813, 'signed_url_cache_max_age_sec': 2890, 'signed_url_key_names': ['signed_url_key_names_value1', 'signed_url_key_names_value2']}, 'circuit_breakers': {'max_connections': 1608, 'max_pending_requests': 2149, 'max_requests': 1313, 'max_requests_per_connection': 2902, 'max_retries': 1187}, 'compression_mode': 'compression_mode_value', 'connection_draining': {'draining_timeout_sec': 2124}, 'connection_tracking_policy': {'connection_persistence_on_unhealthy_backends': 'connection_persistence_on_unhealthy_backends_value', 'enable_strong_affinity': True, 'idle_timeout_sec': 1694, 'tracking_mode': 'tracking_mode_value'}, 'consistent_hash': {'http_cookie': {'name': 'name_value', 'path': 'path_value', 'ttl': {'nanos': 543, 'seconds': 751}}, 'http_header_name': 'http_header_name_value', 'minimum_ring_size': 1829}, 'creation_timestamp': 'creation_timestamp_value', 'custom_request_headers': ['custom_request_headers_value1', 'custom_request_headers_value2'], 'custom_response_headers': ['custom_response_headers_value1', 'custom_response_headers_value2'], 'description': 'description_value', 'edge_security_policy': 'edge_security_policy_value', 'enable_c_d_n': True, 'failover_policy': 
{'disable_connection_drain_on_failover': True, 'drop_traffic_if_unhealthy': True, 'failover_ratio': 0.1494}, 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'iap': {'enabled': True, 'oauth2_client_id': 'oauth2_client_id_value', 'oauth2_client_secret': 'oauth2_client_secret_value', 'oauth2_client_secret_sha256': 'oauth2_client_secret_sha256_value'}, 'id': 205, 'kind': 'kind_value', 'load_balancing_scheme': 'load_balancing_scheme_value', 'locality_lb_policies': [{'custom_policy': {'data': 'data_value', 'name': 'name_value'}, 'policy': {'name': 'name_value'}}], 'locality_lb_policy': 'locality_lb_policy_value', 'log_config': {'enable': True, 'optional_fields': ['optional_fields_value1', 'optional_fields_value2'], 'optional_mode': 'optional_mode_value', 'sample_rate': 0.1165}, 'max_stream_duration': {}, 'metadatas': {}, 'name': 'name_value', 'network': 'network_value', 'outlier_detection': {'base_ejection_time': {}, 'consecutive_errors': 1956, 'consecutive_gateway_failure': 2880, 'enforcing_consecutive_errors': 3006, 'enforcing_consecutive_gateway_failure': 3930, 'enforcing_success_rate': 2334, 'interval': {}, 'max_ejection_percent': 2118, 'success_rate_minimum_hosts': 2799, 'success_rate_request_volume': 2915, 'success_rate_stdev_factor': 2663}, 'port': 453, 'port_name': 'port_name_value', 'protocol': 'protocol_value', 'region': 'region_value', 'security_policy': 'security_policy_value', 'security_settings': {'client_tls_policy': 'client_tls_policy_value', 'subject_alt_names': ['subject_alt_names_value1', 'subject_alt_names_value2']}, 'self_link': 'self_link_value', 'service_bindings': ['service_bindings_value1', 'service_bindings_value2'], 'session_affinity': 'session_affinity_value', 'subsetting': {'policy': 'policy_value'}, 'timeout_sec': 1185} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'backend_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_unary( + compute.UpdateRegionBackendServiceRequest(), + project='project_value', + region='region_value', + backend_service='backend_service_value', + backend_service_resource=compute.BackendService(affinity_cookie_ttl_sec=2432), + ) + + +def test_update_unary_rest_error(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionBackendServicesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionBackendServicesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionBackendServicesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionBackendServicesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionBackendServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionBackendServicesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionBackendServicesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionBackendServicesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_backend_services_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionBackendServicesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_backend_services_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_backend_services.transports.RegionBackendServicesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionBackendServicesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'get_health', + 'get_iam_policy', + 'insert', + 'list', + 'patch', + 'set_iam_policy', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_backend_services_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_backend_services.transports.RegionBackendServicesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionBackendServicesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_backend_services_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_backend_services.transports.RegionBackendServicesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionBackendServicesTransport() + adc.assert_called_once() + + +def test_region_backend_services_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionBackendServicesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_backend_services_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionBackendServicesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_backend_services_host_no_port(transport_name): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_backend_services_host_with_port(transport_name): + client = 
RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_backend_services_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionBackendServicesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionBackendServicesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_health._session + session2 = client2.transport.get_health._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + 
billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionBackendServicesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionBackendServicesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionBackendServicesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionBackendServicesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionBackendServicesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionBackendServicesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionBackendServicesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionBackendServicesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionBackendServicesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionBackendServicesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionBackendServicesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionBackendServicesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionBackendServicesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionBackendServicesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionBackendServicesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionBackendServicesTransport, '_prep_wrapped_messages') as prep: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionBackendServicesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionBackendServicesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionBackendServicesClient, transports.RegionBackendServicesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_commitments.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_commitments.py new file mode 100644 index 000000000..f21b5d655 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_commitments.py @@ -0,0 +1,2848 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_commitments import RegionCommitmentsClient +from google.cloud.compute_v1.services.region_commitments import pagers +from google.cloud.compute_v1.services.region_commitments import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionCommitmentsClient._get_default_mtls_endpoint(None) is None + assert RegionCommitmentsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionCommitmentsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionCommitmentsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionCommitmentsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionCommitmentsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionCommitmentsClient, "rest"), +]) +def test_region_commitments_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionCommitmentsRestTransport, "rest"), +]) +def test_region_commitments_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionCommitmentsClient, "rest"), +]) +def test_region_commitments_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_commitments_client_get_transport_class(): + transport = RegionCommitmentsClient.get_transport_class() + available_transports = [ + transports.RegionCommitmentsRestTransport, + ] + assert transport in available_transports + + transport = RegionCommitmentsClient.get_transport_class("rest") + assert transport == transports.RegionCommitmentsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionCommitmentsClient, transports.RegionCommitmentsRestTransport, "rest"), 
+]) +@mock.patch.object(RegionCommitmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionCommitmentsClient)) +def test_region_commitments_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionCommitmentsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionCommitmentsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionCommitmentsClient, transports.RegionCommitmentsRestTransport, "rest", "true"), + (RegionCommitmentsClient, transports.RegionCommitmentsRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionCommitmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionCommitmentsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_commitments_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionCommitmentsClient +]) +@mock.patch.object(RegionCommitmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionCommitmentsClient)) +def test_region_commitments_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionCommitmentsClient, transports.RegionCommitmentsRestTransport, "rest"), +]) +def test_region_commitments_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionCommitmentsClient, transports.RegionCommitmentsRestTransport, "rest", None), +]) +def test_region_commitments_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListRegionCommitmentsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.CommitmentAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.CommitmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListRegionCommitmentsRequest): + transport_class = transports.RegionCommitmentsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.CommitmentAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.CommitmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.RegionCommitmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionCommitmentsRestInterceptor(), + ) + client = RegionCommitmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListRegionCommitmentsRequest.pb(compute.AggregatedListRegionCommitmentsRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.CommitmentAggregatedList.to_json(compute.CommitmentAggregatedList()) + + request = compute.AggregatedListRegionCommitmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.CommitmentAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListRegionCommitmentsRequest): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.CommitmentAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.CommitmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/commitments" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListRegionCommitmentsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.CommitmentAggregatedList( + items={ + 'a':compute.CommitmentsScopedList(), + 'b':compute.CommitmentsScopedList(), + 'c':compute.CommitmentsScopedList(), + }, + next_page_token='abc', + ), + compute.CommitmentAggregatedList( + items={}, + next_page_token='def', + ), + compute.CommitmentAggregatedList( + items={ + 'g':compute.CommitmentsScopedList(), + }, + next_page_token='ghi', + ), + compute.CommitmentAggregatedList( + items={ + 'h':compute.CommitmentsScopedList(), + 'i':compute.CommitmentsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.CommitmentAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.CommitmentsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.CommitmentsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.CommitmentsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionCommitmentRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionCommitmentsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Commitment( + auto_renew=True, + category='category_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_timestamp='end_timestamp_value', + id=205, + kind='kind_value', + merge_source_commitments=['merge_source_commitments_value'], + name='name_value', + plan='plan_value', + region='region_value', + self_link='self_link_value', + split_source_commitment='split_source_commitment_value', + start_timestamp='start_timestamp_value', + status='status_value', + status_message='status_message_value', + type_='type__value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Commitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Commitment) + assert response.auto_renew is True + assert response.category == 'category_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_timestamp == 'end_timestamp_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.merge_source_commitments == ['merge_source_commitments_value'] + assert response.name == 'name_value' + assert response.plan == 'plan_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.split_source_commitment == 'split_source_commitment_value' + assert response.start_timestamp == 'start_timestamp_value' + assert response.status == 'status_value' + assert response.status_message == 'status_message_value' + assert response.type_ == 'type__value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionCommitmentRequest): + transport_class = transports.RegionCommitmentsRestTransport + + request_init = {} + request_init["commitment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["commitment"] = 'commitment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "commitment" in jsonified_request + assert jsonified_request["commitment"] == 'commitment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Commitment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Commitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionCommitmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("commitment", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionCommitmentsRestInterceptor(), + ) + client = RegionCommitmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionCommitmentRequest.pb(compute.GetRegionCommitmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Commitment.to_json(compute.Commitment()) + + request = compute.GetRegionCommitmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Commitment() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionCommitmentRequest): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Commitment() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + commitment='commitment_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Commitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/commitments/{commitment}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionCommitmentRequest(), + project='project_value', + region='region_value', + commitment='commitment_value', + ) + + +def test_get_rest_error(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionCommitmentRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["commitment_resource"] = {'auto_renew': True, 'category': 'category_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'end_timestamp': 'end_timestamp_value', 'id': 205, 'kind': 'kind_value', 'license_resource': {'amount': 660, 'cores_per_license': 'cores_per_license_value', 'license_': 'license__value'}, 'merge_source_commitments': ['merge_source_commitments_value1', 'merge_source_commitments_value2'], 'name': 'name_value', 'plan': 'plan_value', 'region': 'region_value', 'reservations': [{'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 
'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'}], 'resources': [{'accelerator_type': 'accelerator_type_value', 'amount': 660, 'type_': 'type__value'}], 'self_link': 'self_link_value', 'split_source_commitment': 'split_source_commitment_value', 'start_timestamp': 'start_timestamp_value', 'status': 'status_value', 'status_message': 'status_message_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type 
that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionCommitmentRequest): + transport_class = transports.RegionCommitmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' 
+ jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionCommitmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("commitmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionCommitmentsRestInterceptor(), + ) + client = RegionCommitmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionCommitmentRequest.pb(compute.InsertRegionCommitmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionCommitmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionCommitmentRequest): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["commitment_resource"] = {'auto_renew': True, 'category': 'category_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'end_timestamp': 'end_timestamp_value', 'id': 205, 'kind': 'kind_value', 'license_resource': {'amount': 660, 'cores_per_license': 'cores_per_license_value', 'license_': 'license__value'}, 'merge_source_commitments': ['merge_source_commitments_value1', 'merge_source_commitments_value2'], 'name': 'name_value', 'plan': 'plan_value', 'region': 'region_value', 'reservations': [{'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': 
[{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'}], 'resources': [{'accelerator_type': 'accelerator_type_value', 'amount': 660, 'type_': 'type__value'}], 'self_link': 'self_link_value', 'split_source_commitment': 'split_source_commitment_value', 'start_timestamp': 'start_timestamp_value', 'status': 'status_value', 'status_message': 'status_message_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + commitment_resource=compute.Commitment(auto_renew=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/commitments" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionCommitmentRequest(), + project='project_value', + region='region_value', + commitment_resource=compute.Commitment(auto_renew=True), + ) + + +def test_insert_rest_error(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionCommitmentRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["commitment_resource"] = {'auto_renew': True, 'category': 'category_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'end_timestamp': 'end_timestamp_value', 'id': 205, 'kind': 'kind_value', 'license_resource': {'amount': 660, 'cores_per_license': 'cores_per_license_value', 'license_': 'license__value'}, 'merge_source_commitments': ['merge_source_commitments_value1', 'merge_source_commitments_value2'], 'name': 'name_value', 'plan': 'plan_value', 'region': 'region_value', 'reservations': [{'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 
'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'}], 'resources': [{'accelerator_type': 'accelerator_type_value', 'amount': 660, 'type_': 'type__value'}], 'self_link': 'self_link_value', 'split_source_commitment': 'split_source_commitment_value', 'start_timestamp': 'start_timestamp_value', 'status': 'status_value', 'status_message': 'status_message_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + 
# Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionCommitmentRequest): + transport_class = transports.RegionCommitmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionCommitmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("commitmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionCommitmentsRestInterceptor(), + ) + client = RegionCommitmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionCommitmentsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionCommitmentRequest.pb(compute.InsertRegionCommitmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionCommitmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionCommitmentRequest): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["commitment_resource"] = {'auto_renew': True, 'category': 'category_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'end_timestamp': 'end_timestamp_value', 'id': 205, 'kind': 'kind_value', 'license_resource': {'amount': 660, 'cores_per_license': 'cores_per_license_value', 'license_': 'license__value'}, 'merge_source_commitments': ['merge_source_commitments_value1', 'merge_source_commitments_value2'], 'name': 'name_value', 'plan': 'plan_value', 'region': 'region_value', 'reservations': [{'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 
'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'}], 'resources': [{'accelerator_type': 'accelerator_type_value', 'amount': 660, 'type_': 'type__value'}], 'self_link': 'self_link_value', 'split_source_commitment': 'split_source_commitment_value', 'start_timestamp': 'start_timestamp_value', 'status': 'status_value', 'status_message': 'status_message_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + commitment_resource=compute.Commitment(auto_renew=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/commitments" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionCommitmentRequest(), + project='project_value', + region='region_value', + commitment_resource=compute.Commitment(auto_renew=True), + ) + + +def test_insert_unary_rest_error(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionCommitmentsRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.CommitmentList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.CommitmentList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionCommitmentsRequest): + transport_class = transports.RegionCommitmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.CommitmentList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.CommitmentList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionCommitmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionCommitmentsRestInterceptor(), + ) + client = RegionCommitmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionCommitmentsRequest.pb(compute.ListRegionCommitmentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.CommitmentList.to_json(compute.CommitmentList()) + + request = compute.ListRegionCommitmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.CommitmentList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionCommitmentsRequest): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list(request)
+
+
+# Verifies that the flattened (keyword-argument) form of list() sends a
+# request to the expected REST URI for the region commitments collection.
+def test_list_rest_flattened():
+    client = RegionCommitmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.CommitmentList()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'region': 'sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            region='region_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.CommitmentList.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.list(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/commitments" % client.transport._host, args[1])
+
+
+# Verifies that supplying both a request object and flattened fields raises.
+def test_list_rest_flattened_error(transport: str = 'rest'):
+    client = RegionCommitmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.list( + compute.ListRegionCommitmentsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.CommitmentList( + items=[ + compute.Commitment(), + compute.Commitment(), + compute.Commitment(), + ], + next_page_token='abc', + ), + compute.CommitmentList( + items=[], + next_page_token='def', + ), + compute.CommitmentList( + items=[ + compute.Commitment(), + ], + next_page_token='ghi', + ), + compute.CommitmentList( + items=[ + compute.Commitment(), + compute.Commitment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.CommitmentList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Commitment) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionCommitmentRequest, + dict, +]) +def 
test_update_rest(request_type): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'} + request_init["commitment_resource"] = {'auto_renew': True, 'category': 'category_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'end_timestamp': 'end_timestamp_value', 'id': 205, 'kind': 'kind_value', 'license_resource': {'amount': 660, 'cores_per_license': 'cores_per_license_value', 'license_': 'license__value'}, 'merge_source_commitments': ['merge_source_commitments_value1', 'merge_source_commitments_value2'], 'name': 'name_value', 'plan': 'plan_value', 'region': 'region_value', 'reservations': [{'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'}], 'resources': [{'accelerator_type': 'accelerator_type_value', 'amount': 660, 'type_': 'type__value'}], 'self_link': 'self_link_value', 'split_source_commitment': 
'split_source_commitment_value', 'start_timestamp': 'start_timestamp_value', 'status': 'status_value', 'status_message': 'status_message_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateRegionCommitmentRequest): + transport_class = transports.RegionCommitmentsRestTransport + + request_init = {} + request_init["commitment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["commitment"] = 'commitment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "commitment" in jsonified_request + assert jsonified_request["commitment"] == 'commitment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.RegionCommitmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("commitment", "commitmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionCommitmentsRestInterceptor(), + ) + client = RegionCommitmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionCommitmentRequest.pb(compute.UpdateRegionCommitmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionCommitmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionCommitmentRequest): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'} + request_init["commitment_resource"] = {'auto_renew': True, 'category': 'category_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'end_timestamp': 'end_timestamp_value', 'id': 205, 'kind': 'kind_value', 'license_resource': {'amount': 660, 'cores_per_license': 'cores_per_license_value', 'license_': 'license__value'}, 'merge_source_commitments': ['merge_source_commitments_value1', 'merge_source_commitments_value2'], 'name': 'name_value', 'plan': 'plan_value', 'region': 'region_value', 'reservations': [{'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 
'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'}], 'resources': [{'accelerator_type': 'accelerator_type_value', 'amount': 660, 'type_': 'type__value'}], 'self_link': 'self_link_value', 'split_source_commitment': 'split_source_commitment_value', 'start_timestamp': 'start_timestamp_value', 'status': 'status_value', 'status_message': 'status_message_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + commitment='commitment_value', + commitment_resource=compute.Commitment(auto_renew=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/commitments/{commitment}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateRegionCommitmentRequest(), + project='project_value', + region='region_value', + commitment='commitment_value', + commitment_resource=compute.Commitment(auto_renew=True), + ) + + +def test_update_rest_error(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionCommitmentRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'} + request_init["commitment_resource"] = {'auto_renew': True, 'category': 'category_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'end_timestamp': 'end_timestamp_value', 'id': 205, 'kind': 'kind_value', 'license_resource': {'amount': 660, 'cores_per_license': 'cores_per_license_value', 'license_': 'license__value'}, 'merge_source_commitments': ['merge_source_commitments_value1', 'merge_source_commitments_value2'], 'name': 'name_value', 'plan': 'plan_value', 'region': 'region_value', 'reservations': [{'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 
'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'}], 'resources': [{'accelerator_type': 'accelerator_type_value', 'amount': 660, 'type_': 'type__value'}], 'self_link': 'self_link_value', 'split_source_commitment': 'split_source_commitment_value', 'start_timestamp': 'start_timestamp_value', 'status': 'status_value', 'status_message': 'status_message_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateRegionCommitmentRequest): + transport_class = transports.RegionCommitmentsRestTransport + + request_init = {} + request_init["commitment"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["commitment"] = 'commitment_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "commitment" in jsonified_request + assert jsonified_request["commitment"] == 'commitment_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.RegionCommitmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("commitment", "commitmentResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionCommitmentsRestInterceptor(), + ) + client = RegionCommitmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionCommitmentsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionCommitmentRequest.pb(compute.UpdateRegionCommitmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionCommitmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionCommitmentRequest): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'} + request_init["commitment_resource"] = {'auto_renew': True, 'category': 'category_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'end_timestamp': 'end_timestamp_value', 'id': 205, 'kind': 'kind_value', 'license_resource': {'amount': 660, 'cores_per_license': 'cores_per_license_value', 'license_': 'license__value'}, 'merge_source_commitments': ['merge_source_commitments_value1', 'merge_source_commitments_value2'], 'name': 'name_value', 'plan': 'plan_value', 'region': 'region_value', 'reservations': [{'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 
1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'}], 'resources': [{'accelerator_type': 'accelerator_type_value', 'amount': 660, 'type_': 'type__value'}], 'self_link': 'self_link_value', 'split_source_commitment': 'split_source_commitment_value', 'start_timestamp': 'start_timestamp_value', 'status': 'status_value', 'status_message': 'status_message_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'region': 'sample2', 'commitment': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            region='region_value',
+            commitment='commitment_value',
+            commitment_resource=compute.Commitment(auto_renew=True),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.update_unary(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/commitments/{commitment}" % client.transport._host, args[1])
+
+
+# Verifies that supplying both a request object and flattened fields raises.
+def test_update_unary_rest_flattened_error(transport: str = 'rest'):
+    client = RegionCommitmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_unary(
+            compute.UpdateRegionCommitmentRequest(),
+            project='project_value',
+            region='region_value',
+            commitment='commitment_value',
+            commitment_resource=compute.Commitment(auto_renew=True),
+        )
+
+
+# Smoke test: constructing a REST-transport client does not raise.
+def test_update_unary_rest_error():
+    client = RegionCommitmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+ transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionCommitmentsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionCommitmentsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionCommitmentsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionCommitmentsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RegionCommitmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionCommitmentsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionCommitmentsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionCommitmentsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_commitments_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionCommitmentsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_commitments_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_commitments.transports.RegionCommitmentsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionCommitmentsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'get', + 'insert', + 'list', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_commitments_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_commitments.transports.RegionCommitmentsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionCommitmentsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_commitments_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_commitments.transports.RegionCommitmentsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionCommitmentsTransport() + adc.assert_called_once() + + +def test_region_commitments_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionCommitmentsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_commitments_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionCommitmentsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_commitments_host_no_port(transport_name): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_commitments_host_with_port(transport_name): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_commitments_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionCommitmentsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionCommitmentsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionCommitmentsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionCommitmentsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionCommitmentsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionCommitmentsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionCommitmentsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionCommitmentsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionCommitmentsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionCommitmentsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionCommitmentsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionCommitmentsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionCommitmentsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionCommitmentsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionCommitmentsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionCommitmentsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionCommitmentsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionCommitmentsTransport, '_prep_wrapped_messages') as prep: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionCommitmentsTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionCommitmentsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionCommitmentsClient, transports.RegionCommitmentsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_disk_types.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_disk_types.py new file mode 100644 index 000000000..2f8a1c4bd --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_disk_types.py @@ -0,0 +1,1399 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_disk_types import RegionDiskTypesClient +from google.cloud.compute_v1.services.region_disk_types import pagers +from google.cloud.compute_v1.services.region_disk_types import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionDiskTypesClient._get_default_mtls_endpoint(None) is None + assert RegionDiskTypesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionDiskTypesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionDiskTypesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionDiskTypesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionDiskTypesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionDiskTypesClient, "rest"), +]) +def test_region_disk_types_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionDiskTypesRestTransport, "rest"), +]) +def test_region_disk_types_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionDiskTypesClient, "rest"), +]) +def test_region_disk_types_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_disk_types_client_get_transport_class(): + transport = RegionDiskTypesClient.get_transport_class() + available_transports = [ + transports.RegionDiskTypesRestTransport, + ] + assert transport in available_transports + + transport = RegionDiskTypesClient.get_transport_class("rest") + assert transport == transports.RegionDiskTypesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionDiskTypesClient, transports.RegionDiskTypesRestTransport, "rest"), +]) 
+@mock.patch.object(RegionDiskTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionDiskTypesClient)) +def test_region_disk_types_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionDiskTypesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionDiskTypesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionDiskTypesClient, transports.RegionDiskTypesRestTransport, "rest", "true"), + (RegionDiskTypesClient, transports.RegionDiskTypesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionDiskTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionDiskTypesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_disk_types_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch 
behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionDiskTypesClient +]) +@mock.patch.object(RegionDiskTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionDiskTypesClient)) +def test_region_disk_types_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionDiskTypesClient, transports.RegionDiskTypesRestTransport, "rest"), +]) +def test_region_disk_types_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionDiskTypesClient, transports.RegionDiskTypesRestTransport, "rest", None), +]) +def test_region_disk_types_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionDiskTypeRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk_type': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskType( + creation_timestamp='creation_timestamp_value', + default_disk_size_gb=2097, + description='description_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + valid_disk_size='valid_disk_size_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.DiskType) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.default_disk_size_gb == 2097 + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.valid_disk_size == 'valid_disk_size_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionDiskTypeRequest): + transport_class = transports.RegionDiskTypesRestTransport + + request_init = {} + request_init["disk_type"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["diskType"] = 'disk_type_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "diskType" in jsonified_request + assert jsonified_request["diskType"] == 'disk_type_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DiskType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionDiskTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("diskType", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionDiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDiskTypesRestInterceptor(), + ) + client = RegionDiskTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDiskTypesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionDiskTypesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionDiskTypeRequest.pb(compute.GetRegionDiskTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.DiskType.to_json(compute.DiskType()) + + request = compute.GetRegionDiskTypeRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DiskType() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionDiskTypeRequest): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk_type': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.DiskType() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk_type': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk_type='disk_type_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/diskTypes/{disk_type}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionDiskTypeRequest(), + project='project_value', + region='region_value', + disk_type='disk_type_value', + ) + + +def test_get_rest_error(): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionDiskTypesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionDiskTypeList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionDiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionDiskTypesRequest): + transport_class = transports.RegionDiskTypesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.RegionDiskTypeList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionDiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionDiskTypesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionDiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDiskTypesRestInterceptor(), + ) + client = RegionDiskTypesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDiskTypesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionDiskTypesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionDiskTypesRequest.pb(compute.ListRegionDiskTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionDiskTypeList.to_json(compute.RegionDiskTypeList()) + + request = compute.ListRegionDiskTypesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionDiskTypeList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionDiskTypesRequest): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionDiskTypeList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionDiskTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/diskTypes" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListRegionDiskTypesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionDiskTypeList( + items=[ + compute.DiskType(), + compute.DiskType(), + compute.DiskType(), + ], + next_page_token='abc', + ), + compute.RegionDiskTypeList( + items=[], + next_page_token='def', + ), + compute.RegionDiskTypeList( + items=[ + compute.DiskType(), + ], + next_page_token='ghi', + ), + compute.RegionDiskTypeList( + items=[ + compute.DiskType(), + compute.DiskType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RegionDiskTypeList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.DiskType) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.RegionDiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionDiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionDiskTypesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionDiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionDiskTypesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionDiskTypesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionDiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionDiskTypesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RegionDiskTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionDiskTypesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionDiskTypesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionDiskTypesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_disk_types_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionDiskTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_disk_types_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_disk_types.transports.RegionDiskTypesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionDiskTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'get', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_disk_types_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_disk_types.transports.RegionDiskTypesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionDiskTypesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_disk_types_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_disk_types.transports.RegionDiskTypesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionDiskTypesTransport() + adc.assert_called_once() + + +def test_region_disk_types_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionDiskTypesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_disk_types_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionDiskTypesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_disk_types_host_no_port(transport_name): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_disk_types_host_with_port(transport_name): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_disk_types_client_transport_session_collision(transport_name): 
+ creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionDiskTypesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionDiskTypesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionDiskTypesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionDiskTypesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionDiskTypesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionDiskTypesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionDiskTypesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionDiskTypesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionDiskTypesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionDiskTypesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionDiskTypesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionDiskTypesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionDiskTypesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionDiskTypesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionDiskTypesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionDiskTypesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionDiskTypesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionDiskTypesTransport, '_prep_wrapped_messages') as prep: + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionDiskTypesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionDiskTypesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionDiskTypesClient, transports.RegionDiskTypesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_disks.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_disks.py new file mode 100644 index 000000000..e8c35784f --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_disks.py @@ -0,0 +1,8973 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_disks import RegionDisksClient +from google.cloud.compute_v1.services.region_disks import pagers +from google.cloud.compute_v1.services.region_disks import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionDisksClient._get_default_mtls_endpoint(None) is None + assert RegionDisksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionDisksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionDisksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionDisksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionDisksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionDisksClient, "rest"), +]) +def test_region_disks_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionDisksRestTransport, "rest"), +]) +def test_region_disks_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', 
create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionDisksClient, "rest"), +]) +def test_region_disks_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_disks_client_get_transport_class(): + transport = RegionDisksClient.get_transport_class() + available_transports = [ + transports.RegionDisksRestTransport, + ] + assert transport in available_transports + + transport = RegionDisksClient.get_transport_class("rest") + assert transport == transports.RegionDisksRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionDisksClient, transports.RegionDisksRestTransport, "rest"), +]) +@mock.patch.object(RegionDisksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionDisksClient)) +def 
test_region_disks_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionDisksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionDisksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionDisksClient, transports.RegionDisksRestTransport, "rest", "true"), + (RegionDisksClient, transports.RegionDisksRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionDisksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionDisksClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_disks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionDisksClient +]) +@mock.patch.object(RegionDisksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionDisksClient)) +def test_region_disks_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionDisksClient, transports.RegionDisksRestTransport, "rest"), +]) +def test_region_disks_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionDisksClient, transports.RegionDisksRestTransport, "rest", None), +]) +def test_region_disks_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddResourcePoliciesRegionDiskRequest, + dict, +]) +def test_add_resource_policies_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_resource_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_resource_policies_rest_required_fields(request_type=compute.AddResourcePoliciesRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_resource_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_resource_policies_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "regionDisksAddResourcePoliciesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_resource_policies_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_add_resource_policies") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_add_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddResourcePoliciesRegionDiskRequest.pb(compute.AddResourcePoliciesRegionDiskRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddResourcePoliciesRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_resource_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_resource_policies_rest_bad_request(transport: str = 'rest', request_type=compute.AddResourcePoliciesRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies(request) + + +def test_add_resource_policies_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + region_disks_add_resource_policies_request_resource=compute.RegionDisksAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_resource_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/addResourcePolicies" % client.transport._host, args[1]) + + +def test_add_resource_policies_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_resource_policies( + compute.AddResourcePoliciesRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + region_disks_add_resource_policies_request_resource=compute.RegionDisksAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_add_resource_policies_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddResourcePoliciesRegionDiskRequest, + dict, +]) +def test_add_resource_policies_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_resource_policies_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_resource_policies_unary_rest_required_fields(request_type=compute.AddResourcePoliciesRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_resource_policies_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_resource_policies_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "regionDisksAddResourcePoliciesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_resource_policies_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_add_resource_policies") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_add_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddResourcePoliciesRegionDiskRequest.pb(compute.AddResourcePoliciesRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddResourcePoliciesRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_resource_policies_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_resource_policies_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddResourcePoliciesRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_add_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies_unary(request) + + +def test_add_resource_policies_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + region_disks_add_resource_policies_request_resource=compute.RegionDisksAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_resource_policies_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/addResourcePolicies" % client.transport._host, args[1]) + + +def test_add_resource_policies_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_resource_policies_unary( + compute.AddResourcePoliciesRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + region_disks_add_resource_policies_request_resource=compute.RegionDisksAddResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_add_resource_policies_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.BulkInsertRegionDiskRequest, + dict, +]) +def test_bulk_insert_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["bulk_insert_disk_resource_resource"] = {'source_consistency_group_policy': 'source_consistency_group_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.bulk_insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_bulk_insert_rest_required_fields(request_type=compute.BulkInsertRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.bulk_insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_bulk_insert_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.bulk_insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("bulkInsertDiskResourceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_insert_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_bulk_insert") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_bulk_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.BulkInsertRegionDiskRequest.pb(compute.BulkInsertRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.BulkInsertRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.bulk_insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_bulk_insert_rest_bad_request(transport: str = 'rest', request_type=compute.BulkInsertRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["bulk_insert_disk_resource_resource"] = {'source_consistency_group_policy': 'source_consistency_group_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.bulk_insert(request) + + +def test_bulk_insert_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource(source_consistency_group_policy='source_consistency_group_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.bulk_insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/bulkInsert" % client.transport._host, args[1]) + + +def test_bulk_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.bulk_insert( + compute.BulkInsertRegionDiskRequest(), + project='project_value', + region='region_value', + bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource(source_consistency_group_policy='source_consistency_group_policy_value'), + ) + + +def test_bulk_insert_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.BulkInsertRegionDiskRequest, + dict, +]) +def test_bulk_insert_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["bulk_insert_disk_resource_resource"] = {'source_consistency_group_policy': 'source_consistency_group_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.bulk_insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_bulk_insert_unary_rest_required_fields(request_type=compute.BulkInsertRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.bulk_insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_bulk_insert_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.bulk_insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("bulkInsertDiskResourceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionDisksRestInterceptor, "post_bulk_insert") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_bulk_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.BulkInsertRegionDiskRequest.pb(compute.BulkInsertRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.BulkInsertRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.bulk_insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_bulk_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.BulkInsertRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["bulk_insert_disk_resource_resource"] = {'source_consistency_group_policy': 'source_consistency_group_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.bulk_insert_unary(request) + + +def test_bulk_insert_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource(source_consistency_group_policy='source_consistency_group_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.bulk_insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/bulkInsert" % client.transport._host, args[1]) + + +def test_bulk_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.bulk_insert_unary( + compute.BulkInsertRegionDiskRequest(), + project='project_value', + region='region_value', + bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource(source_consistency_group_policy='source_consistency_group_policy_value'), + ) + + +def test_bulk_insert_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CreateSnapshotRegionDiskRequest, + dict, +]) +def test_create_snapshot_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 
'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.create_snapshot(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_create_snapshot_rest_required_fields(request_type=compute.CreateSnapshotRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_snapshot._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_snapshot._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_snapshot(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_snapshot_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.create_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "snapshotResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_snapshot_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_create_snapshot") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_create_snapshot") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CreateSnapshotRegionDiskRequest.pb(compute.CreateSnapshotRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params":
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CreateSnapshotRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.create_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_snapshot_rest_bad_request(transport: str = 'rest', request_type=compute.CreateSnapshotRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 
'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_snapshot(request) + + +def test_create_snapshot_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/createSnapshot" % client.transport._host, args[1]) + + +def test_create_snapshot_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_snapshot( + compute.CreateSnapshotRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + + +def test_create_snapshot_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CreateSnapshotRegionDiskRequest, + dict, +]) +def test_create_snapshot_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 
'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.create_snapshot_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_create_snapshot_unary_rest_required_fields(request_type=compute.CreateSnapshotRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_snapshot._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_snapshot_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_snapshot_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.create_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "snapshotResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_snapshot_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session),
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_create_snapshot") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_create_snapshot") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CreateSnapshotRegionDiskRequest.pb(compute.CreateSnapshotRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CreateSnapshotRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.create_snapshot_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_snapshot_unary_rest_bad_request(transport: str = 'rest', request_type=compute.CreateSnapshotRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 
'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_snapshot_unary(request) + + +def test_create_snapshot_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_snapshot_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/createSnapshot" % client.transport._host, args[1]) + + +def test_create_snapshot_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_snapshot_unary( + compute.CreateSnapshotRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + + +def test_create_snapshot_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionDiskRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionDiskRequest.pb(compute.DeleteRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + ) + + +def test_delete_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionDiskRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionDisksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionDiskRequest.pb(compute.DeleteRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionDiskRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Disk( + architecture='architecture_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + last_attach_timestamp='last_attach_timestamp_value', + last_detach_timestamp='last_detach_timestamp_value', + license_codes=[1360], + licenses=['licenses_value'], + location_hint='location_hint_value', + name='name_value', + options='options_value', + physical_block_size_bytes=2663, + provisioned_iops=1740, + provisioned_throughput=2411, + region='region_value', + replica_zones=['replica_zones_value'], + resource_policies=['resource_policies_value'], + satisfies_pzs=True, + self_link='self_link_value', + size_gb=739, + source_consistency_group_policy='source_consistency_group_policy_value', + source_consistency_group_policy_id='source_consistency_group_policy_id_value', + source_disk='source_disk_value', + source_disk_id='source_disk_id_value', + source_image='source_image_value', + 
source_image_id='source_image_id_value', + source_snapshot='source_snapshot_value', + source_snapshot_id='source_snapshot_id_value', + source_storage_object='source_storage_object_value', + status='status_value', + type_='type__value', + users=['users_value'], + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Disk) + assert response.architecture == 'architecture_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.last_attach_timestamp == 'last_attach_timestamp_value' + assert response.last_detach_timestamp == 'last_detach_timestamp_value' + assert response.license_codes == [1360] + assert response.licenses == ['licenses_value'] + assert response.location_hint == 'location_hint_value' + assert response.name == 'name_value' + assert response.options == 'options_value' + assert response.physical_block_size_bytes == 2663 + assert response.provisioned_iops == 1740 + assert response.provisioned_throughput == 2411 + assert response.region == 'region_value' + assert response.replica_zones == ['replica_zones_value'] + assert response.resource_policies == ['resource_policies_value'] + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.size_gb == 739 + assert response.source_consistency_group_policy == 'source_consistency_group_policy_value' + assert 
response.source_consistency_group_policy_id == 'source_consistency_group_policy_id_value' + assert response.source_disk == 'source_disk_value' + assert response.source_disk_id == 'source_disk_id_value' + assert response.source_image == 'source_image_value' + assert response.source_image_id == 'source_image_id_value' + assert response.source_snapshot == 'source_snapshot_value' + assert response.source_snapshot_id == 'source_snapshot_id_value' + assert response.source_storage_object == 'source_storage_object_value' + assert response.status == 'status_value' + assert response.type_ == 'type__value' + assert response.users == ['users_value'] + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert 
jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Disk() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("disk", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionDiskRequest.pb(compute.GetRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Disk.to_json(compute.Disk()) + + request = compute.GetRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Disk() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Disk() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Disk.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + ) + + +def test_get_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyRegionDiskRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "region", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyRegionDiskRequest.pb(compute.GetIamPolicyRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyRegionDiskRequest(), + project='project_value', + region='region_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionDiskRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': 
['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_image", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceImage", )) & set(("diskResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionDiskRequest.pb(compute.InsertRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 
'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionDiskRequest(), + project='project_value', + region='region_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + + +def test_insert_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionDiskRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 
'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "source_image", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceImage", )) & set(("diskResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionDisksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionDiskRequest.pb(compute.InsertRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 
'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionDiskRequest(), + project='project_value', + region='region_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionDisksRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionDisksRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.DiskList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionDisksRequest.pb(compute.ListRegionDisksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.DiskList.to_json(compute.DiskList()) + + request = compute.ListRegionDisksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DiskList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionDisksRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DiskList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DiskList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionDisksRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.DiskList( + items=[ + compute.Disk(), + compute.Disk(), + compute.Disk(), + ], + next_page_token='abc', + ), + compute.DiskList( + items=[], + next_page_token='def', + ), + compute.DiskList( + items=[ + compute.Disk(), + ], + next_page_token='ghi', + ), + compute.DiskList( + items=[ + compute.Disk(), + compute.Disk(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.DiskList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Disk) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveResourcePoliciesRegionDiskRequest, + dict, +]) +def test_remove_resource_policies_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_resource_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_resource_policies_rest_required_fields(request_type=compute.RemoveResourcePoliciesRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now 
present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_resource_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_resource_policies_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "regionDisksRemoveResourcePoliciesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_resource_policies_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_remove_resource_policies") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_remove_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveResourcePoliciesRegionDiskRequest.pb(compute.RemoveResourcePoliciesRegionDiskRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveResourcePoliciesRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_resource_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_resource_policies_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveResourcePoliciesRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies(request) + + +def test_remove_resource_policies_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + region_disks_remove_resource_policies_request_resource=compute.RegionDisksRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_resource_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/removeResourcePolicies" % client.transport._host, args[1]) + + +def test_remove_resource_policies_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_resource_policies( + compute.RemoveResourcePoliciesRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + region_disks_remove_resource_policies_request_resource=compute.RegionDisksRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_remove_resource_policies_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveResourcePoliciesRegionDiskRequest, + dict, +]) +def test_remove_resource_policies_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_resource_policies_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_resource_policies_unary_rest_required_fields(request_type=compute.RemoveResourcePoliciesRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_resource_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_resource_policies_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_resource_policies_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_resource_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "regionDisksRemoveResourcePoliciesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_resource_policies_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = 
RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_remove_resource_policies") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_remove_resource_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveResourcePoliciesRegionDiskRequest.pb(compute.RemoveResourcePoliciesRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveResourcePoliciesRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_resource_policies_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_resource_policies_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveResourcePoliciesRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_remove_resource_policies_request_resource"] = {'resource_policies': ['resource_policies_value1', 'resource_policies_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies_unary(request) + + +def test_remove_resource_policies_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + region_disks_remove_resource_policies_request_resource=compute.RegionDisksRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_resource_policies_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/removeResourcePolicies" % client.transport._host, args[1]) + + +def test_remove_resource_policies_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_resource_policies_unary( + compute.RemoveResourcePoliciesRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + region_disks_remove_resource_policies_request_resource=compute.RegionDisksRemoveResourcePoliciesRequest(resource_policies=['resource_policies_value']), + ) + + +def test_remove_resource_policies_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeRegionDiskRequest, + dict, +]) +def test_resize_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_resize_request_resource"] = {'size_gb': 739} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_resize_rest_required_fields(request_type=compute.ResizeRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "regionDisksResizeRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_resize") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeRegionDiskRequest.pb(compute.ResizeRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_resize_request_resource"] = {'size_gb': 739} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + +def test_resize_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + region_disks_resize_request_resource=compute.RegionDisksResizeRequest(size_gb=739), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/resize" % client.transport._host, args[1]) + + +def test_resize_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize( + compute.ResizeRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + region_disks_resize_request_resource=compute.RegionDisksResizeRequest(size_gb=739), + ) + + +def test_resize_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeRegionDiskRequest, + dict, +]) +def test_resize_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_resize_request_resource"] = {'size_gb': 739} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_resize_unary_rest_required_fields(request_type=compute.ResizeRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.resize_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_resize_unary_rest_unset_required_fields():
+    transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.resize._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "regionDisksResizeRequestResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_resize_unary_rest_interceptors(null_interceptor):
+    transport = transports.RegionDisksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(),
+    )
+    client = RegionDisksClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.RegionDisksRestInterceptor, "post_resize") as post, \
+        mock.patch.object(transports.RegionDisksRestInterceptor, "pre_resize") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.ResizeRegionDiskRequest.pb(compute.ResizeRegionDiskRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.ResizeRegionDiskRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.resize_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_resize_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeRegionDiskRequest):
+    client = RegionDisksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'}
+    request_init["region_disks_resize_request_resource"] = {'size_gb': 739}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize_unary(request) + + +def test_resize_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + region_disks_resize_request_resource=compute.RegionDisksResizeRequest(size_gb=739), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/resize" % client.transport._host, args[1]) + + +def test_resize_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resize_unary( + compute.ResizeRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + region_disks_resize_request_resource=compute.RegionDisksResizeRequest(size_gb=739), + ) + + +def test_resize_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyRegionDiskRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 
'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Policy.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_iam_policy(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_iam_policy_rest_unset_required_fields():
+    transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_iam_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("project", "region", "regionSetPolicyRequestResource", "resource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_iam_policy_rest_interceptors(null_interceptor):
+    transport = transports.RegionDisksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(),
+    )
+    client = RegionDisksClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.RegionDisksRestInterceptor, "post_set_iam_policy") as post, \
+        mock.patch.object(transports.RegionDisksRestInterceptor, "pre_set_iam_policy") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetIamPolicyRegionDiskRequest.pb(compute.SetIamPolicyRegionDiskRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Policy.to_json(compute.Policy())
+
+        request = compute.SetIamPolicyRegionDiskRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Policy()
+
+        client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyRegionDiskRequest):
+    client = RegionDisksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}
+    request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_iam_policy(request)
+
+
+def test_set_iam_policy_rest_flattened():
+    client = RegionDisksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyRegionDiskRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsRegionDiskRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_labels(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_labels_rest_unset_required_fields():
+    transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_labels._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_labels_rest_interceptors(null_interceptor):
+    transport = transports.RegionDisksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(),
+    )
+    client = RegionDisksClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.RegionDisksRestInterceptor, "post_set_labels") as post, \
+        mock.patch.object(transports.RegionDisksRestInterceptor, "pre_set_labels") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetLabelsRegionDiskRequest.pb(compute.SetLabelsRegionDiskRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetLabelsRegionDiskRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsRegionDiskRequest):
+    client = RegionDisksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}
+    request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_labels(request)
+
+
+def test_set_labels_rest_flattened():
+    client = RegionDisksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsRegionDiskRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsRegionDiskRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionDisksRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsRegionDiskRequest.pb(compute.SetLabelsRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsRegionDiskRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StartAsyncReplicationRegionDiskRequest, + dict, +]) +def test_start_async_replication_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_start_async_replication_request_resource"] = {'async_secondary_disk': 'async_secondary_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.start_async_replication(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_start_async_replication_rest_required_fields(request_type=compute.StartAsyncReplicationRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + 
+ jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.start_async_replication(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_start_async_replication_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.start_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "regionDisksStartAsyncReplicationRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_async_replication_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_start_async_replication") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_start_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StartAsyncReplicationRegionDiskRequest.pb(compute.StartAsyncReplicationRegionDiskRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StartAsyncReplicationRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.start_async_replication(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_async_replication_rest_bad_request(transport: str = 'rest', request_type=compute.StartAsyncReplicationRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_start_async_replication_request_resource"] = {'async_secondary_disk': 'async_secondary_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_async_replication(request) + + +def test_start_async_replication_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + region_disks_start_async_replication_request_resource=compute.RegionDisksStartAsyncReplicationRequest(async_secondary_disk='async_secondary_disk_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.start_async_replication(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/startAsyncReplication" % client.transport._host, args[1]) + + +def test_start_async_replication_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.start_async_replication( + compute.StartAsyncReplicationRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + region_disks_start_async_replication_request_resource=compute.RegionDisksStartAsyncReplicationRequest(async_secondary_disk='async_secondary_disk_value'), + ) + + +def test_start_async_replication_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StartAsyncReplicationRegionDiskRequest, + dict, +]) +def test_start_async_replication_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_start_async_replication_request_resource"] = {'async_secondary_disk': 'async_secondary_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.start_async_replication_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_start_async_replication_unary_rest_required_fields(request_type=compute.StartAsyncReplicationRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.start_async_replication_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_start_async_replication_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.start_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", "regionDisksStartAsyncReplicationRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_async_replication_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = 
RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_start_async_replication") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_start_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StartAsyncReplicationRegionDiskRequest.pb(compute.StartAsyncReplicationRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StartAsyncReplicationRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.start_async_replication_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_async_replication_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StartAsyncReplicationRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["region_disks_start_async_replication_request_resource"] = {'async_secondary_disk': 'async_secondary_disk_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_async_replication_unary(request) + + +def test_start_async_replication_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + region_disks_start_async_replication_request_resource=compute.RegionDisksStartAsyncReplicationRequest(async_secondary_disk='async_secondary_disk_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.start_async_replication_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/startAsyncReplication" % client.transport._host, args[1]) + + +def test_start_async_replication_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_async_replication_unary( + compute.StartAsyncReplicationRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + region_disks_start_async_replication_request_resource=compute.RegionDisksStartAsyncReplicationRequest(async_secondary_disk='async_secondary_disk_value'), + ) + + +def test_start_async_replication_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopAsyncReplicationRegionDiskRequest, + dict, +]) +def test_stop_async_replication_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop_async_replication(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_stop_async_replication_rest_required_fields(request_type=compute.StopAsyncReplicationRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_async_replication(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_async_replication_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_async_replication_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_stop_async_replication") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_stop_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopAsyncReplicationRegionDiskRequest.pb(compute.StopAsyncReplicationRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + 
} + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopAsyncReplicationRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_async_replication(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_async_replication_rest_bad_request(transport: str = 'rest', request_type=compute.StopAsyncReplicationRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_async_replication(request) + + +def test_stop_async_replication_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_async_replication(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/stopAsyncReplication" % client.transport._host, args[1]) + + +def test_stop_async_replication_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop_async_replication( + compute.StopAsyncReplicationRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + ) + + +def test_stop_async_replication_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopAsyncReplicationRegionDiskRequest, + dict, +]) +def test_stop_async_replication_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop_async_replication_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_stop_async_replication_unary_rest_required_fields(request_type=compute.StopAsyncReplicationRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_async_replication_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_async_replication_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disk", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_async_replication_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_stop_async_replication") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_stop_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopAsyncReplicationRegionDiskRequest.pb(compute.StopAsyncReplicationRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopAsyncReplicationRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_async_replication_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_async_replication_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StopAsyncReplicationRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_async_replication_unary(request) + + +def test_stop_async_replication_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_async_replication_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/stopAsyncReplication" % client.transport._host, args[1]) + + +def test_stop_async_replication_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stop_async_replication_unary( + compute.StopAsyncReplicationRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + ) + + +def test_stop_async_replication_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopGroupAsyncReplicationRegionDiskRequest, + dict, +]) +def test_stop_group_async_replication_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["disks_stop_group_async_replication_resource_resource"] = {'resource_policy': 'resource_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop_group_async_replication(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_stop_group_async_replication_rest_required_fields(request_type=compute.StopGroupAsyncReplicationRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_group_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_group_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_group_async_replication(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_group_async_replication_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop_group_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disksStopGroupAsyncReplicationResourceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_group_async_replication_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_stop_group_async_replication") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_stop_group_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.StopGroupAsyncReplicationRegionDiskRequest.pb(compute.StopGroupAsyncReplicationRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopGroupAsyncReplicationRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_group_async_replication(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_group_async_replication_rest_bad_request(transport: str = 'rest', request_type=compute.StopGroupAsyncReplicationRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["disks_stop_group_async_replication_resource_resource"] = {'resource_policy': 'resource_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_group_async_replication(request) + + +def test_stop_group_async_replication_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disks_stop_group_async_replication_resource_resource=compute.DisksStopGroupAsyncReplicationResource(resource_policy='resource_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_group_async_replication(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/stopGroupAsyncReplication" % client.transport._host, args[1]) + + +def test_stop_group_async_replication_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop_group_async_replication( + compute.StopGroupAsyncReplicationRegionDiskRequest(), + project='project_value', + region='region_value', + disks_stop_group_async_replication_resource_resource=compute.DisksStopGroupAsyncReplicationResource(resource_policy='resource_policy_value'), + ) + + +def test_stop_group_async_replication_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.StopGroupAsyncReplicationRegionDiskRequest, + dict, +]) +def test_stop_group_async_replication_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["disks_stop_group_async_replication_resource_resource"] = {'resource_policy': 'resource_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.stop_group_async_replication_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_stop_group_async_replication_unary_rest_required_fields(request_type=compute.StopGroupAsyncReplicationRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_group_async_replication._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_group_async_replication._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.stop_group_async_replication_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_stop_group_async_replication_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.stop_group_async_replication._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("disksStopGroupAsyncReplicationResourceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_group_async_replication_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_stop_group_async_replication") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_stop_group_async_replication") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.StopGroupAsyncReplicationRegionDiskRequest.pb(compute.StopGroupAsyncReplicationRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.StopGroupAsyncReplicationRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.stop_group_async_replication_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_group_async_replication_unary_rest_bad_request(transport: str = 'rest', request_type=compute.StopGroupAsyncReplicationRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["disks_stop_group_async_replication_resource_resource"] = {'resource_policy': 'resource_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_group_async_replication_unary(request) + + +def test_stop_group_async_replication_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disks_stop_group_async_replication_resource_resource=compute.DisksStopGroupAsyncReplicationResource(resource_policy='resource_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.stop_group_async_replication_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/stopGroupAsyncReplication" % client.transport._host, args[1]) + + +def test_stop_group_async_replication_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stop_group_async_replication_unary( + compute.StopGroupAsyncReplicationRegionDiskRequest(), + project='project_value', + region='region_value', + disks_stop_group_async_replication_resource_resource=compute.DisksStopGroupAsyncReplicationResource(resource_policy='resource_policy_value'), + ) + + +def test_stop_group_async_replication_unary_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsRegionDiskRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 
'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", 
"testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsRegionDiskRequest.pb(compute.TestIamPermissionsRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 
'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsRegionDiskRequest(), + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionDiskRequest, + dict, +]) +def test_update_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 
'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("disk", "diskResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionDiskRequest.pb(compute.UpdateRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 
'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateRegionDiskRequest(), + project='project_value', + region='region_value', + disk='disk_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + + +def test_update_rest_error(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionDiskRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 
'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateRegionDiskRequest): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = 'disk_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == 'disk_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("disk", "diskResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionDisksRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionDisksRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionDiskRequest.pb(compute.UpdateRegionDiskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionDiskRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionDiskRequest): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + request_init["disk_resource"] = {'architecture': 'architecture_value', 'async_primary_disk': {'consistency_group_policy': 'consistency_group_policy_value', 'consistency_group_policy_id': 'consistency_group_policy_id_value', 'disk': 'disk_value', 'disk_id': 'disk_id_value'}, 'async_secondary_disks': {}, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'guest_os_features': [{'type_': 'type__value'}], 'id': 205, 'kind': 
'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'last_attach_timestamp': 'last_attach_timestamp_value', 'last_detach_timestamp': 'last_detach_timestamp_value', 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'options': 'options_value', 'params': {'resource_manager_tags': {}}, 'physical_block_size_bytes': 2663, 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'region': 'region_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'resource_status': {'async_primary_disk': {'state': 'state_value'}, 'async_secondary_disks': {}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'size_gb': 739, 'source_consistency_group_policy': 'source_consistency_group_policy_value', 'source_consistency_group_policy_id': 'source_consistency_group_policy_id_value', 'source_disk': 'source_disk_value', 'source_disk_id': 'source_disk_id_value', 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_image_id': 'source_image_id_value', 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}, 'source_snapshot_id': 'source_snapshot_id_value', 'source_storage_object': 'source_storage_object_value', 'status': 'status_value', 'type_': 'type__value', 'users': ['users_value1', 'users_value2'], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'disk': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + disk='disk_value', + disk_resource=compute.Disk(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
        # Exactly one HTTP request, against the documented update URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}" % client.transport._host, args[1])


def test_update_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = RegionDisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_unary(
            compute.UpdateRegionDiskRequest(),
            project='project_value',
            region='region_value',
            disk='disk_value',
            disk_resource=compute.Disk(architecture='architecture_value'),
        )


def test_update_unary_rest_error():
    """Smoke test: a REST client for update_unary can be constructed."""
    client = RegionDisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


def test_credentials_transport_error():
    """Mutually exclusive client constructor arguments raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.RegionDisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionDisksClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.RegionDisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionDisksClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.RegionDisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RegionDisksClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RegionDisksClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.RegionDisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionDisksClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    """A client may be instantiated with a custom transport instance."""
    transport = transports.RegionDisksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = RegionDisksClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.RegionDisksRestTransport,
])
def test_transport_adc(transport_class):
    """Default (ADC) credentials are used when none are provided to the transport."""
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """transport.kind reports the name the transport was constructed under."""
    transport = RegionDisksClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_region_disks_base_transport_error():
    """Passing both a credentials object and credentials_file raises DuplicateCredentialArgs."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.RegionDisksTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_region_disks_base_transport():
    """Every method on the abstract base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.compute_v1.services.region_disks.transports.RegionDisksTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.RegionDisksTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'add_resource_policies',
        'bulk_insert',
        'create_snapshot',
        'delete',
        'get',
        'get_iam_policy',
        'insert',
        'list',
        'remove_resource_policies',
        'resize',
        'set_iam_policy',
        'set_labels',
        'start_async_replication',
        'stop_async_replication',
        'stop_group_async_replication',
        'test_iam_permissions',
        'update',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_region_disks_base_transport_with_credentials_file():
    """The base transport loads credentials from a file with the compute default scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_disks.transports.RegionDisksTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RegionDisksTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id="octopus",
        )


def test_region_disks_base_transport_with_adc():
    """The default (ADC) credentials are used if credentials and credentials_file are None."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_disks.transports.RegionDisksTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RegionDisksTransport()
        adc.assert_called_once()


def test_region_disks_auth_adc():
    """When no credentials are provided, the client falls back to ADC with the compute scopes."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        RegionDisksClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id=None,
        )


def test_region_disks_http_transport_client_cert_source_for_mtls():
    """A client cert source passed to the REST transport configures the mTLS channel."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.RegionDisksRestTransport(
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_region_disks_host_no_port(transport_name):
    """Without an explicit port, the REST host resolves to the https endpoint."""
    client = RegionDisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_region_disks_host_with_port(transport_name):
    """An explicit port in the configured endpoint is preserved in the transport host."""
    client = RegionDisksClient(
        credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_disks_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionDisksClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionDisksClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_resource_policies._session + session2 = client2.transport.add_resource_policies._session + assert session1 != session2 + session1 = client1.transport.bulk_insert._session + session2 = client2.transport.bulk_insert._session + assert session1 != session2 + session1 = client1.transport.create_snapshot._session + session2 = client2.transport.create_snapshot._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.remove_resource_policies._session + session2 = client2.transport.remove_resource_policies._session + assert session1 != session2 + session1 = client1.transport.resize._session + session2 = 
client2.transport.resize._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.start_async_replication._session + session2 = client2.transport.start_async_replication._session + assert session1 != session2 + session1 = client1.transport.stop_async_replication._session + session2 = client2.transport.stop_async_replication._session + assert session1 != session2 + session1 = client1.transport.stop_group_async_replication._session + session2 = client2.transport.stop_group_async_replication._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionDisksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionDisksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionDisksClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionDisksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionDisksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionDisksClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionDisksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionDisksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionDisksClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionDisksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionDisksClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionDisksClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionDisksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionDisksClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionDisksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionDisksTransport, '_prep_wrapped_messages') as prep: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionDisksTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionDisksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionDisksClient, transports.RegionDisksRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_health_check_services.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_health_check_services.py new file mode 100644 index 000000000..3f94affc8 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_health_check_services.py @@ -0,0 +1,3085 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
# NOTE(review): duplicate import of json_format (also imported above) — generated code.
from google.protobuf import json_format

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.region_health_check_services import RegionHealthCheckServicesClient
from google.cloud.compute_v1.services.region_health_check_services import pagers
from google.cloud.compute_v1.services.region_health_check_services import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    """Dummy client certificate source used by the mTLS tests below."""
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a non-localhost stand-in endpoint so mTLS derivation can be observed."""
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint inserts '.mtls' only for googleapis.com hosts."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert RegionHealthCheckServicesClient._get_default_mtls_endpoint(None) is None
    assert RegionHealthCheckServicesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert RegionHealthCheckServicesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert RegionHealthCheckServicesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert RegionHealthCheckServicesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert RegionHealthCheckServicesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (RegionHealthCheckServicesClient, "rest"),
])
def test_region_health_check_services_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info wires the credentials and default host into the client."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.RegionHealthCheckServicesRestTransport, "rest"),
])
def test_region_health_check_services_client_service_account_always_use_jwt(transport_class, transport_name):
    """always_use_jwt_access toggles with_always_use_jwt_access on service-account creds."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (RegionHealthCheckServicesClient, "rest"),
])
def test_region_health_check_services_client_from_service_account_file(client_class, transport_name):
    """from_service_account_file/json wire the credentials and default host into the client."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_region_health_check_services_client_get_transport_class():
    """get_transport_class returns the REST transport by default and by name."""
    transport = RegionHealthCheckServicesClient.get_transport_class()
    available_transports = [
        transports.RegionHealthCheckServicesRestTransport,
    ]
    assert transport in available_transports

    transport = RegionHealthCheckServicesClient.get_transport_class("rest")
    assert transport ==
transports.RegionHealthCheckServicesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionHealthCheckServicesClient, transports.RegionHealthCheckServicesRestTransport, "rest"), +]) +@mock.patch.object(RegionHealthCheckServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionHealthCheckServicesClient)) +def test_region_health_check_services_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionHealthCheckServicesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionHealthCheckServicesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionHealthCheckServicesClient, transports.RegionHealthCheckServicesRestTransport, "rest", "true"), + (RegionHealthCheckServicesClient, transports.RegionHealthCheckServicesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionHealthCheckServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionHealthCheckServicesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_health_check_services_client_mtls_env_auto(client_class, transport_class, 
transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionHealthCheckServicesClient +]) +@mock.patch.object(RegionHealthCheckServicesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionHealthCheckServicesClient)) +def test_region_health_check_services_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionHealthCheckServicesClient, transports.RegionHealthCheckServicesRestTransport, "rest"), +]) +def test_region_health_check_services_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionHealthCheckServicesClient, transports.RegionHealthCheckServicesRestTransport, "rest", None), +]) +def test_region_health_check_services_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionHealthCheckServiceRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionHealthCheckServiceRequest): + transport_class = transports.RegionHealthCheckServicesRestTransport + + request_init = {} + request_init["health_check_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["healthCheckService"] = 'health_check_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheckService" in jsonified_request + assert jsonified_request["healthCheckService"] == 'health_check_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionHealthCheckServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckService", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthCheckServicesRestInterceptor(), + ) + client = RegionHealthCheckServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionHealthCheckServiceRequest.pb(compute.DeleteRegionHealthCheckServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionHealthCheckServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionHealthCheckServiceRequest): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionHealthCheckServiceRequest(), + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + ) + + +def test_delete_rest_error(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionHealthCheckServiceRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionHealthCheckServiceRequest): + transport_class = transports.RegionHealthCheckServicesRestTransport + + request_init = {} + request_init["health_check_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheckService"] = 'health_check_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheckService" in jsonified_request + assert jsonified_request["healthCheckService"] == 'health_check_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_unary_rest_unset_required_fields():
+    transport = transports.RegionHealthCheckServicesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckService", "project", "region", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_unary_rest_interceptors(null_interceptor):
+    transport = transports.RegionHealthCheckServicesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionHealthCheckServicesRestInterceptor(),
+    )
+    client = RegionHealthCheckServicesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "post_delete") as post, \
+         mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "pre_delete") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteRegionHealthCheckServiceRequest.pb(compute.DeleteRegionHealthCheckServiceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.DeleteRegionHealthCheckServiceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionHealthCheckServiceRequest):
+    client = RegionHealthCheckServicesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionHealthCheckServiceRequest(), + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionHealthCheckServiceRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.HealthCheckService( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + health_checks=['health_checks_value'], + health_status_aggregation_policy='health_status_aggregation_policy_value', + id=205, + kind='kind_value', + name='name_value', + network_endpoint_groups=['network_endpoint_groups_value'], + notification_endpoints=['notification_endpoints_value'], + region='region_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheckService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.HealthCheckService) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.health_checks == ['health_checks_value'] + assert response.health_status_aggregation_policy == 'health_status_aggregation_policy_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network_endpoint_groups == ['network_endpoint_groups_value'] + assert response.notification_endpoints == ['notification_endpoints_value'] + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionHealthCheckServiceRequest): + transport_class = transports.RegionHealthCheckServicesRestTransport + + request_init = {} + request_init["health_check_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = 
request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheckService"] = 'health_check_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheckService" in jsonified_request + assert jsonified_request["healthCheckService"] == 'health_check_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheckService() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.HealthCheckService.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_rest_unset_required_fields():
+    transport = transports.RegionHealthCheckServicesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("healthCheckService", "project", "region", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_rest_interceptors(null_interceptor):
+    transport = transports.RegionHealthCheckServicesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionHealthCheckServicesRestInterceptor(),
+    )
+    client = RegionHealthCheckServicesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "post_get") as post, \
+         mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "pre_get") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.GetRegionHealthCheckServiceRequest.pb(compute.GetRegionHealthCheckServiceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.HealthCheckService.to_json(compute.HealthCheckService())
+
+        request = compute.GetRegionHealthCheckServiceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.HealthCheckService()
+
+        client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionHealthCheckServiceRequest):
+    client = RegionHealthCheckServicesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get(request)
+
+
+def test_get_rest_flattened():
+    client = RegionHealthCheckServicesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.HealthCheckService() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheckService.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionHealthCheckServiceRequest(), + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + ) + + +def test_get_rest_error(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionHealthCheckServiceRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["health_check_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'health_status_aggregation_policy': 'health_status_aggregation_policy_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network_endpoint_groups': ['network_endpoint_groups_value1', 'network_endpoint_groups_value2'], 'notification_endpoints': ['notification_endpoints_value1', 'notification_endpoints_value2'], 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionHealthCheckServiceRequest): + transport_class = transports.RegionHealthCheckServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' 
+ jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.insert(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_insert_rest_unset_required_fields():
+    transport = transports.RegionHealthCheckServicesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.insert._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckServiceResource", "project", "region", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_insert_rest_interceptors(null_interceptor):
+    transport = transports.RegionHealthCheckServicesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionHealthCheckServicesRestInterceptor(),
+    )
+    client = RegionHealthCheckServicesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "post_insert") as post, \
+         mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "pre_insert") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.InsertRegionHealthCheckServiceRequest.pb(compute.InsertRegionHealthCheckServiceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.InsertRegionHealthCheckServiceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionHealthCheckServiceRequest):
+    client = RegionHealthCheckServicesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2'}
+    request_init["health_check_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'health_status_aggregation_policy': 'health_status_aggregation_policy_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network_endpoint_groups': ['network_endpoint_groups_value1', 'network_endpoint_groups_value2'], 'notification_endpoints': ['notification_endpoints_value1', 'notification_endpoints_value2'], 'region': 'region_value', 'self_link': 'self_link_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check_service_resource=compute.HealthCheckService(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertRegionHealthCheckServiceRequest(), + project='project_value', + region='region_value', + health_check_service_resource=compute.HealthCheckService(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionHealthCheckServiceRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["health_check_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'health_status_aggregation_policy': 'health_status_aggregation_policy_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network_endpoint_groups': ['network_endpoint_groups_value1', 'network_endpoint_groups_value2'], 'notification_endpoints': ['notification_endpoints_value1', 'notification_endpoints_value2'], 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionHealthCheckServiceRequest): + transport_class = transports.RegionHealthCheckServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.insert_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_insert_unary_rest_unset_required_fields():
+    transport = transports.RegionHealthCheckServicesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.insert._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckServiceResource", "project", "region", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_insert_unary_rest_interceptors(null_interceptor):
+    transport = transports.RegionHealthCheckServicesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionHealthCheckServicesRestInterceptor(),
+    )
+    client = RegionHealthCheckServicesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "post_insert") as post, \
+         mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "pre_insert") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.InsertRegionHealthCheckServiceRequest.pb(compute.InsertRegionHealthCheckServiceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.InsertRegionHealthCheckServiceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionHealthCheckServiceRequest):
+    client = RegionHealthCheckServicesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2'}
+    request_init["health_check_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'health_status_aggregation_policy': 'health_status_aggregation_policy_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network_endpoint_groups': ['network_endpoint_groups_value1', 'network_endpoint_groups_value2'], 'notification_endpoints': ['notification_endpoints_value1', 'notification_endpoints_value2'], 'region': 'region_value', 'self_link': 'self_link_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.insert_unary(request)
+
+
+def test_insert_unary_rest_flattened():
+    client = RegionHealthCheckServicesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'region': 'sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            region='region_value',
+            health_check_service_resource=compute.HealthCheckService(creation_timestamp='creation_timestamp_value'),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.insert_unary(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionHealthCheckServiceRequest(), + project='project_value', + region='region_value', + health_check_service_resource=compute.HealthCheckService(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionHealthCheckServicesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.HealthCheckServicesList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheckServicesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionHealthCheckServicesRequest): + transport_class = transports.RegionHealthCheckServicesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheckServicesList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.HealthCheckServicesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionHealthCheckServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthCheckServicesRestInterceptor(), + ) + client = RegionHealthCheckServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionHealthCheckServicesRequest.pb(compute.ListRegionHealthCheckServicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.HealthCheckServicesList.to_json(compute.HealthCheckServicesList()) + + request = compute.ListRegionHealthCheckServicesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.HealthCheckServicesList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionHealthCheckServicesRequest): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.HealthCheckServicesList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheckServicesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionHealthCheckServicesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.HealthCheckServicesList( + items=[ + compute.HealthCheckService(), + compute.HealthCheckService(), + compute.HealthCheckService(), + ], + next_page_token='abc', + ), + compute.HealthCheckServicesList( + items=[], + next_page_token='def', + ), + compute.HealthCheckServicesList( + items=[ + compute.HealthCheckService(), + ], + next_page_token='ghi', + ), + compute.HealthCheckServicesList( + items=[ + compute.HealthCheckService(), + compute.HealthCheckService(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.HealthCheckServicesList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.HealthCheckService) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionHealthCheckServiceRequest, + dict, +]) +def test_patch_rest(request_type): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + request_init["health_check_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 
'description_value', 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'health_status_aggregation_policy': 'health_status_aggregation_policy_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network_endpoint_groups': ['network_endpoint_groups_value1', 'network_endpoint_groups_value2'], 'notification_endpoints': ['notification_endpoints_value1', 'notification_endpoints_value2'], 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionHealthCheckServiceRequest): + transport_class = transports.RegionHealthCheckServicesRestTransport + + request_init = {} + request_init["health_check_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["healthCheckService"] = 'health_check_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheckService" in jsonified_request + assert jsonified_request["healthCheckService"] == 'health_check_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionHealthCheckServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckService", "healthCheckServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthCheckServicesRestInterceptor(), + ) + client = RegionHealthCheckServicesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionHealthCheckServiceRequest.pb(compute.PatchRegionHealthCheckServiceRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionHealthCheckServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionHealthCheckServiceRequest): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + request_init["health_check_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'health_status_aggregation_policy': 'health_status_aggregation_policy_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network_endpoint_groups': ['network_endpoint_groups_value1', 'network_endpoint_groups_value2'], 'notification_endpoints': ['notification_endpoints_value1', 'notification_endpoints_value2'], 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + health_check_service_resource=compute.HealthCheckService(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchRegionHealthCheckServiceRequest(), + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + health_check_service_resource=compute.HealthCheckService(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionHealthCheckServiceRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + request_init["health_check_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'health_status_aggregation_policy': 'health_status_aggregation_policy_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network_endpoint_groups': ['network_endpoint_groups_value1', 'network_endpoint_groups_value2'], 'notification_endpoints': ['notification_endpoints_value1', 'notification_endpoints_value2'], 'region': 'region_value', 
'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionHealthCheckServiceRequest): + transport_class = transports.RegionHealthCheckServicesRestTransport + + request_init = {} + request_init["health_check_service"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheckService"] = 'health_check_service_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheckService" in jsonified_request + assert jsonified_request["healthCheckService"] == 'health_check_service_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionHealthCheckServicesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckService", "healthCheckServiceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthCheckServicesRestInterceptor(), + ) + client = RegionHealthCheckServicesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionHealthCheckServicesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionHealthCheckServiceRequest.pb(compute.PatchRegionHealthCheckServiceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionHealthCheckServiceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionHealthCheckServiceRequest): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + request_init["health_check_service_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'health_status_aggregation_policy': 'health_status_aggregation_policy_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network_endpoint_groups': ['network_endpoint_groups_value1', 
'network_endpoint_groups_value2'], 'notification_endpoints': ['notification_endpoints_value1', 'notification_endpoints_value2'], 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check_service': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + health_check_service_resource=compute.HealthCheckService(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionHealthCheckServiceRequest(), + project='project_value', + region='region_value', + health_check_service='health_check_service_value', + health_check_service_resource=compute.HealthCheckService(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionHealthCheckServicesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionHealthCheckServicesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionHealthCheckServicesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionHealthCheckServicesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionHealthCheckServicesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionHealthCheckServicesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionHealthCheckServicesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionHealthCheckServicesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_health_check_services_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionHealthCheckServicesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_health_check_services_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_health_check_services.transports.RegionHealthCheckServicesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionHealthCheckServicesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_health_check_services_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_health_check_services.transports.RegionHealthCheckServicesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionHealthCheckServicesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_health_check_services_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_health_check_services.transports.RegionHealthCheckServicesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionHealthCheckServicesTransport() + adc.assert_called_once() + + +def test_region_health_check_services_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionHealthCheckServicesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_health_check_services_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionHealthCheckServicesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_health_check_services_host_no_port(transport_name): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_health_check_services_host_with_port(transport_name): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_region_health_check_services_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionHealthCheckServicesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionHealthCheckServicesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionHealthCheckServicesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionHealthCheckServicesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionHealthCheckServicesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionHealthCheckServicesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionHealthCheckServicesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionHealthCheckServicesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionHealthCheckServicesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionHealthCheckServicesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionHealthCheckServicesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionHealthCheckServicesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionHealthCheckServicesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionHealthCheckServicesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionHealthCheckServicesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionHealthCheckServicesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionHealthCheckServicesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionHealthCheckServicesTransport, '_prep_wrapped_messages') as prep: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionHealthCheckServicesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionHealthCheckServicesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = 
RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionHealthCheckServicesClient, transports.RegionHealthCheckServicesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_health_checks.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_health_checks.py new file mode 100644 index 000000000..3f94657cd --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_health_checks.py @@ -0,0 +1,3653 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_health_checks import RegionHealthChecksClient +from google.cloud.compute_v1.services.region_health_checks import pagers +from google.cloud.compute_v1.services.region_health_checks import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# 
If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionHealthChecksClient._get_default_mtls_endpoint(None) is None + assert RegionHealthChecksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionHealthChecksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionHealthChecksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionHealthChecksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionHealthChecksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionHealthChecksClient, "rest"), +]) +def test_region_health_checks_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + 
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionHealthChecksRestTransport, "rest"), +]) +def test_region_health_checks_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionHealthChecksClient, "rest"), +]) +def test_region_health_checks_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_health_checks_client_get_transport_class(): + transport = RegionHealthChecksClient.get_transport_class() + available_transports = [ + transports.RegionHealthChecksRestTransport, + ] + assert transport in available_transports + + transport = 
RegionHealthChecksClient.get_transport_class("rest") + assert transport == transports.RegionHealthChecksRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionHealthChecksClient, transports.RegionHealthChecksRestTransport, "rest"), +]) +@mock.patch.object(RegionHealthChecksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionHealthChecksClient)) +def test_region_health_checks_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionHealthChecksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionHealthChecksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionHealthChecksClient, transports.RegionHealthChecksRestTransport, "rest", "true"), + (RegionHealthChecksClient, transports.RegionHealthChecksRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionHealthChecksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionHealthChecksClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_health_checks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the 
endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionHealthChecksClient +]) +@mock.patch.object(RegionHealthChecksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionHealthChecksClient)) +def test_region_health_checks_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionHealthChecksClient, transports.RegionHealthChecksRestTransport, "rest"), +]) +def test_region_health_checks_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionHealthChecksClient, transports.RegionHealthChecksRestTransport, "rest", None), +]) +def test_region_health_checks_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionHealthCheckRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionHealthCheckRequest.pb(compute.DeleteRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check='health_check_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check='health_check_value', + ) + + +def test_delete_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionHealthCheckRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionHealthCheckRequest.pb(compute.DeleteRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check='health_check_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check='health_check_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionHealthCheckRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheck( + check_interval_sec=1884, + creation_timestamp='creation_timestamp_value', + description='description_value', + healthy_threshold=1819, + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + timeout_sec=1185, + type_='type__value', + unhealthy_threshold=2046, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.HealthCheck) + assert response.check_interval_sec == 1884 + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.healthy_threshold == 1819 + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.timeout_sec == 1185 + assert response.type_ == 'type__value' + assert response.unhealthy_threshold == 2046 + + +def test_get_rest_required_fields(request_type=compute.GetRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert 
"region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheck() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("healthCheck", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + 
interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionHealthCheckRequest.pb(compute.GetRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.HealthCheck.to_json(compute.HealthCheck()) + + request = compute.GetRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.HealthCheck() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheck() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check='health_check_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheck.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check='health_check_value', + ) + + +def test_get_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionHealthCheckRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 
'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionHealthCheckRequest.pb(compute.InsertRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 
'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_insert_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionHealthCheckRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': 
{'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheckResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionHealthCheckRequest.pb(compute.InsertRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 
'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_insert_unary_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionHealthChecksRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheckList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionHealthChecksRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.HealthCheckList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionHealthChecksRequest.pb(compute.ListRegionHealthChecksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.HealthCheckList.to_json(compute.HealthCheckList()) + + request = compute.ListRegionHealthChecksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.HealthCheckList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionHealthChecksRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthCheckList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.HealthCheckList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListRegionHealthChecksRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.HealthCheckList( + items=[ + compute.HealthCheck(), + compute.HealthCheck(), + compute.HealthCheck(), + ], + next_page_token='abc', + ), + compute.HealthCheckList( + items=[], + next_page_token='def', + ), + compute.HealthCheckList( + items=[ + compute.HealthCheck(), + ], + next_page_token='ghi', + ), + compute.HealthCheckList( + items=[ + compute.HealthCheck(), + compute.HealthCheck(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.HealthCheckList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.HealthCheck) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionHealthCheckRequest, + dict, +]) +def 
test_patch_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 
'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "healthCheckResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionHealthCheckRequest.pb(compute.PatchRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + 
+ req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 
'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_patch_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionHealthCheckRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 
'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "healthCheckResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionHealthCheckRequest.pb(compute.PatchRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 
'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_patch_unary_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionHealthCheckRequest, + dict, +]) +def test_update_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 
'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "healthCheckResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionHealthCheckRequest.pb(compute.UpdateRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 
'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_update_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionHealthCheckRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 
'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateRegionHealthCheckRequest): + transport_class = transports.RegionHealthChecksRestTransport + + request_init = {} + request_init["health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthCheck"] = 'health_check_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthCheck" in jsonified_request + assert jsonified_request["healthCheck"] == 'health_check_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.RegionHealthChecksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("healthCheck", "healthCheckResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionHealthChecksRestInterceptor(), + ) + client = RegionHealthChecksClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionHealthChecksRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionHealthCheckRequest.pb(compute.UpdateRegionHealthCheckRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionHealthCheckRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionHealthCheckRequest): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + request_init["health_check_resource"] = {'check_interval_sec': 1884, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_health_check': {'grpc_service_name': 'grpc_service_name_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value'}, 'healthy_threshold': 1819, 'http2_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 
'request_path': 'request_path_value', 'response': 'response_value'}, 'http_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'https_health_check': {'host': 'host_value', 'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request_path': 'request_path_value', 'response': 'response_value'}, 'id': 205, 'kind': 'kind_value', 'log_config': {'enable': True}, 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'ssl_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'tcp_health_check': {'port': 453, 'port_name': 'port_name_value', 'port_specification': 'port_specification_value', 'proxy_header': 'proxy_header_value', 'request': 'request_value', 'response': 'response_value'}, 'timeout_sec': 1185, 'type_': 'type__value', 'unhealthy_threshold': 2046} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'health_check': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_unary( + compute.UpdateRegionHealthCheckRequest(), + project='project_value', + region='region_value', + health_check='health_check_value', + health_check_resource=compute.HealthCheck(check_interval_sec=1884), + ) + + +def test_update_unary_rest_error(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionHealthChecksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionHealthChecksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionHealthChecksClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionHealthChecksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionHealthChecksClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionHealthChecksRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionHealthChecksClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_health_checks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionHealthChecksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_health_checks_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.compute_v1.services.region_health_checks.transports.RegionHealthChecksTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionHealthChecksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_health_checks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_health_checks.transports.RegionHealthChecksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionHealthChecksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_health_checks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_health_checks.transports.RegionHealthChecksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionHealthChecksTransport() + adc.assert_called_once() + + +def test_region_health_checks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionHealthChecksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_health_checks_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionHealthChecksRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_health_checks_host_no_port(transport_name): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_health_checks_host_with_port(transport_name): + client = RegionHealthChecksClient( + 
credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_health_checks_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionHealthChecksClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionHealthChecksClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionHealthChecksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionHealthChecksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionHealthChecksClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionHealthChecksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionHealthChecksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionHealthChecksClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionHealthChecksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionHealthChecksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionHealthChecksClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionHealthChecksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionHealthChecksClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionHealthChecksClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionHealthChecksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionHealthChecksClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionHealthChecksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionHealthChecksTransport, '_prep_wrapped_messages') as prep: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionHealthChecksTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionHealthChecksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionHealthChecksClient, transports.RegionHealthChecksRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py new file mode 100644 index 000000000..b4b7257e4 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py @@ -0,0 +1,10262 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_instance_group_managers import RegionInstanceGroupManagersClient +from google.cloud.compute_v1.services.region_instance_group_managers import pagers +from google.cloud.compute_v1.services.region_instance_group_managers import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionInstanceGroupManagersClient._get_default_mtls_endpoint(None) is None + assert RegionInstanceGroupManagersClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionInstanceGroupManagersClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionInstanceGroupManagersClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionInstanceGroupManagersClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionInstanceGroupManagersClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionInstanceGroupManagersClient, "rest"), +]) +def test_region_instance_group_managers_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionInstanceGroupManagersRestTransport, "rest"), +]) +def 
test_region_instance_group_managers_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionInstanceGroupManagersClient, "rest"), +]) +def test_region_instance_group_managers_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_instance_group_managers_client_get_transport_class(): + transport = RegionInstanceGroupManagersClient.get_transport_class() + available_transports = [ + transports.RegionInstanceGroupManagersRestTransport, + ] + assert transport in available_transports + + transport = RegionInstanceGroupManagersClient.get_transport_class("rest") + assert transport == 
transports.RegionInstanceGroupManagersRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionInstanceGroupManagersClient, transports.RegionInstanceGroupManagersRestTransport, "rest"), +]) +@mock.patch.object(RegionInstanceGroupManagersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceGroupManagersClient)) +def test_region_instance_group_managers_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionInstanceGroupManagersClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionInstanceGroupManagersClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionInstanceGroupManagersClient, transports.RegionInstanceGroupManagersRestTransport, "rest", "true"), + (RegionInstanceGroupManagersClient, transports.RegionInstanceGroupManagersRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionInstanceGroupManagersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceGroupManagersClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_instance_group_managers_client_mtls_env_auto(client_class, 
transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionInstanceGroupManagersClient +]) +@mock.patch.object(RegionInstanceGroupManagersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceGroupManagersClient)) +def test_region_instance_group_managers_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionInstanceGroupManagersClient, transports.RegionInstanceGroupManagersRestTransport, "rest"), +]) +def test_region_instance_group_managers_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionInstanceGroupManagersClient, transports.RegionInstanceGroupManagersRestTransport, "rest", None), +]) +def test_region_instance_group_managers_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AbandonInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_abandon_instances_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_abandon_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.abandon_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_abandon_instances_rest_required_fields(request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).abandon_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).abandon_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.abandon_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_abandon_instances_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.abandon_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersAbandonInstancesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_abandon_instances_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_abandon_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_abandon_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.AbandonInstancesRegionInstanceGroupManagerRequest.pb(compute.AbandonInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AbandonInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.abandon_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_abandon_instances_rest_bad_request(transport: str = 'rest', request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_abandon_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.abandon_instances(request) + + +def test_abandon_instances_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_abandon_instances_request_resource=compute.RegionInstanceGroupManagersAbandonInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.abandon_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/abandonInstances" % client.transport._host, args[1]) + + +def test_abandon_instances_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.abandon_instances( + compute.AbandonInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_abandon_instances_request_resource=compute.RegionInstanceGroupManagersAbandonInstancesRequest(instances=['instances_value']), + ) + + +def test_abandon_instances_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AbandonInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_abandon_instances_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_abandon_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.abandon_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_abandon_instances_unary_rest_required_fields(request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).abandon_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).abandon_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.abandon_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_abandon_instances_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.abandon_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersAbandonInstancesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_abandon_instances_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_abandon_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_abandon_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.AbandonInstancesRegionInstanceGroupManagerRequest.pb(compute.AbandonInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AbandonInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.abandon_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_abandon_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_abandon_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.abandon_instances_unary(request) + + +def test_abandon_instances_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_abandon_instances_request_resource=compute.RegionInstanceGroupManagersAbandonInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.abandon_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/abandonInstances" % client.transport._host, args[1]) + + +def test_abandon_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.abandon_instances_unary( + compute.AbandonInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_abandon_instances_request_resource=compute.RegionInstanceGroupManagersAbandonInstancesRequest(instances=['instances_value']), + ) + + +def test_abandon_instances_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_apply_updates_to_instances_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_apply_updates_request_resource"] = {'all_instances': True, 'instances': ['instances_value1', 'instances_value2'], 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.apply_updates_to_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_apply_updates_to_instances_rest_required_fields(request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).apply_updates_to_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # 
verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).apply_updates_to_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.apply_updates_to_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_apply_updates_to_instances_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.apply_updates_to_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersApplyUpdatesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_apply_updates_to_instances_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_apply_updates_to_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_apply_updates_to_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message 
= compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.pb(compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.apply_updates_to_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_apply_updates_to_instances_rest_bad_request(transport: str = 'rest', request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_apply_updates_request_resource"] = {'all_instances': True, 'instances': ['instances_value1', 'instances_value2'], 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_updates_to_instances(request) + + +def test_apply_updates_to_instances_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_apply_updates_request_resource=compute.RegionInstanceGroupManagersApplyUpdatesRequest(all_instances=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.apply_updates_to_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances" % client.transport._host, args[1]) + + +def test_apply_updates_to_instances_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.apply_updates_to_instances( + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_apply_updates_request_resource=compute.RegionInstanceGroupManagersApplyUpdatesRequest(all_instances=True), + ) + + +def test_apply_updates_to_instances_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_apply_updates_to_instances_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_apply_updates_request_resource"] = {'all_instances': True, 'instances': ['instances_value1', 'instances_value2'], 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.apply_updates_to_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_apply_updates_to_instances_unary_rest_required_fields(request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).apply_updates_to_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).apply_updates_to_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.apply_updates_to_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_apply_updates_to_instances_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.apply_updates_to_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersApplyUpdatesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_apply_updates_to_instances_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_apply_updates_to_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_apply_updates_to_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.pb(compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.apply_updates_to_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_apply_updates_to_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_apply_updates_request_resource"] = {'all_instances': True, 'instances': 
['instances_value1', 'instances_value2'], 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_updates_to_instances_unary(request) + + +def test_apply_updates_to_instances_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_apply_updates_request_resource=compute.RegionInstanceGroupManagersApplyUpdatesRequest(all_instances=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.apply_updates_to_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances" % client.transport._host, args[1]) + + +def test_apply_updates_to_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.apply_updates_to_instances_unary( + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_apply_updates_request_resource=compute.RegionInstanceGroupManagersApplyUpdatesRequest(all_instances=True), + ) + + +def test_apply_updates_to_instances_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CreateInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_create_instances_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_create_instances_request_resource"] = {'instances': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_create_instances_rest_required_fields(request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instances_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersCreateInstancesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instances_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_create_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_create_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.CreateInstancesRegionInstanceGroupManagerRequest.pb(compute.CreateInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CreateInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.create_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instances_rest_bad_request(transport: str = 'rest', request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_create_instances_request_resource"] = {'instances': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instances(request) + + +def test_create_instances_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_create_instances_request_resource=compute.RegionInstanceGroupManagersCreateInstancesRequest(instances=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/createInstances" % client.transport._host, args[1]) + + +def test_create_instances_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instances( + compute.CreateInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_create_instances_request_resource=compute.RegionInstanceGroupManagersCreateInstancesRequest(instances=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_create_instances_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CreateInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_create_instances_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_create_instances_request_resource"] = {'instances': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_create_instances_unary_rest_required_fields(request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instances_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersCreateInstancesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instances_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_create_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_create_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.CreateInstancesRegionInstanceGroupManagerRequest.pb(compute.CreateInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CreateInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.create_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_create_instances_request_resource"] = {'instances': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instances_unary(request) + + +def test_create_instances_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_create_instances_request_resource=compute.RegionInstanceGroupManagersCreateInstancesRequest(instances=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/createInstances" % client.transport._host, args[1]) + + +def test_create_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instances_unary( + compute.CreateInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_create_instances_request_resource=compute.RegionInstanceGroupManagersCreateInstancesRequest(instances=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_create_instances_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionInstanceGroupManagerRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present 
+ + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionInstanceGroupManagerRequest.pb(compute.DeleteRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_delete_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionInstanceGroupManagerRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as 
req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionInstanceGroupManagerRequest.pb(compute.DeleteRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_delete_instances_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_delete_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2'], 'skip_instances_on_validation_error': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_instances_rest_required_fields(request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instances_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersDeleteInstancesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instances_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_delete_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DeleteInstancesRegionInstanceGroupManagerRequest.pb(compute.DeleteInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instances_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_delete_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2'], 'skip_instances_on_validation_error': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instances(request) + + +def test_delete_instances_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_delete_instances_request_resource=compute.RegionInstanceGroupManagersDeleteInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deleteInstances" % client.transport._host, args[1]) + + +def test_delete_instances_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instances( + compute.DeleteInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_delete_instances_request_resource=compute.RegionInstanceGroupManagersDeleteInstancesRequest(instances=['instances_value']), + ) + + +def test_delete_instances_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_delete_instances_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_delete_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2'], 'skip_instances_on_validation_error': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_instances_unary_rest_required_fields(request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instances_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersDeleteInstancesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instances_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_delete_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DeleteInstancesRegionInstanceGroupManagerRequest.pb(compute.DeleteInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_delete_instances_request_resource"] = {'instances': ['instances_value1', 'instances_value2'], 'skip_instances_on_validation_error': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instances_unary(request) + + +def test_delete_instances_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_delete_instances_request_resource=compute.RegionInstanceGroupManagersDeleteInstancesRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deleteInstances" % client.transport._host, args[1]) + + +def test_delete_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instances_unary( + compute.DeleteInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_delete_instances_request_resource=compute.RegionInstanceGroupManagersDeleteInstancesRequest(instances=['instances_value']), + ) + + +def test_delete_instances_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, + dict, +]) +def test_delete_per_instance_configs_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_delete_instance_config_req_resource"] = {'names': ['names_value1', 'names_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_per_instance_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_per_instance_configs_rest_required_fields(request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # 
verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_per_instance_configs(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_per_instance_configs_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagerDeleteInstanceConfigReqResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_per_instance_configs_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_delete_per_instance_configs") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest.pb(compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_per_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_per_instance_configs_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_delete_instance_config_req_resource"] = {'names': ['names_value1', 'names_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_per_instance_configs(request) + + +def test_delete_per_instance_configs_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_delete_instance_config_req_resource=compute.RegionInstanceGroupManagerDeleteInstanceConfigReq(names=['names_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_per_instance_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs" % client.transport._host, args[1]) + + +def test_delete_per_instance_configs_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_per_instance_configs( + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_delete_instance_config_req_resource=compute.RegionInstanceGroupManagerDeleteInstanceConfigReq(names=['names_value']), + ) + + +def test_delete_per_instance_configs_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, + dict, +]) +def test_delete_per_instance_configs_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_delete_instance_config_req_resource"] = {'names': ['names_value1', 'names_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_per_instance_configs_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_per_instance_configs_unary_rest_required_fields(request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_per_instance_configs_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_per_instance_configs_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagerDeleteInstanceConfigReqResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_per_instance_configs_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_delete_per_instance_configs") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest.pb(compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_per_instance_configs_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_per_instance_configs_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_delete_instance_config_req_resource"] = {'names': ['names_value1', 
'names_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_per_instance_configs_unary(request) + + +def test_delete_per_instance_configs_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_delete_instance_config_req_resource=compute.RegionInstanceGroupManagerDeleteInstanceConfigReq(names=['names_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_per_instance_configs_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs" % client.transport._host, args[1]) + + +def test_delete_per_instance_configs_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_per_instance_configs_unary( + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_delete_instance_config_req_resource=compute.RegionInstanceGroupManagerDeleteInstanceConfigReq(names=['names_value']), + ) + + +def test_delete_per_instance_configs_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionInstanceGroupManagerRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManager( + base_instance_name='base_instance_name_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + instance_group='instance_group_value', + instance_template='instance_template_value', + kind='kind_value', + list_managed_instances_results='list_managed_instances_results_value', + name='name_value', + region='region_value', + self_link='self_link_value', + target_pools=['target_pools_value'], + target_size=1185, + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InstanceGroupManager) + assert response.base_instance_name == 'base_instance_name_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.instance_group == 'instance_group_value' + assert response.instance_template == 'instance_template_value' + assert response.kind == 'kind_value' + assert response.list_managed_instances_results == 'list_managed_instances_results_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.target_pools == ['target_pools_value'] + assert response.target_size == 1185 + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManager() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroupManager", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionInstanceGroupManagerRequest.pb(compute.GetRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupManager.to_json(compute.InstanceGroupManager()) + + request = compute.GetRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManager() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManager() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroupManager.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_get_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionInstanceGroupManagerRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': 
{'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManagerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionInstanceGroupManagerRequest.pb(compute.InsertRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 
'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + + +def test_insert_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionInstanceGroupManagerRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 
'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManagerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionInstanceGroupManagerRequest.pb(compute.InsertRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': 
{'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + + +def test_insert_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionInstanceGroupManagersRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.RegionInstanceGroupManagerList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionInstanceGroupManagersRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagerList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionInstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionInstanceGroupManagersRequest.pb(compute.ListRegionInstanceGroupManagersRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionInstanceGroupManagerList.to_json(compute.RegionInstanceGroupManagerList()) + + request = compute.ListRegionInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionInstanceGroupManagerList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionInstanceGroupManagersRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.RegionInstanceGroupManagerList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupManagerList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionInstanceGroupManagersRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionInstanceGroupManagerList( + items=[ + compute.InstanceGroupManager(), + compute.InstanceGroupManager(), + compute.InstanceGroupManager(), + ], + next_page_token='abc', + ), + compute.RegionInstanceGroupManagerList( + items=[], + next_page_token='def', + ), + compute.RegionInstanceGroupManagerList( + items=[ + compute.InstanceGroupManager(), + ], + next_page_token='ghi', + ), + compute.RegionInstanceGroupManagerList( + items=[ + compute.InstanceGroupManager(), + compute.InstanceGroupManager(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RegionInstanceGroupManagerList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceGroupManager) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListErrorsRegionInstanceGroupManagersRequest, + dict, +]) +def test_list_errors_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListErrorsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_errors(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListErrorsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_errors_rest_required_fields(request_type=compute.ListErrorsRegionInstanceGroupManagersRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_errors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).list_errors._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListErrorsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_errors(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_errors_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_errors._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instanceGroupManager", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_errors_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_list_errors") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_list_errors") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.ListErrorsRegionInstanceGroupManagersRequest.pb(compute.ListErrorsRegionInstanceGroupManagersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionInstanceGroupManagersListErrorsResponse.to_json(compute.RegionInstanceGroupManagersListErrorsResponse()) + + request = compute.ListErrorsRegionInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionInstanceGroupManagersListErrorsResponse() + + client.list_errors(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_errors_rest_bad_request(transport: str = 'rest', request_type=compute.ListErrorsRegionInstanceGroupManagersRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_errors(request) + + +def test_list_errors_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListErrorsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_errors(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listErrors" % client.transport._host, args[1]) + + +def test_list_errors_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_errors( + compute.ListErrorsRegionInstanceGroupManagersRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_list_errors_rest_pager(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionInstanceGroupManagersListErrorsResponse( + items=[ + compute.InstanceManagedByIgmError(), + compute.InstanceManagedByIgmError(), + compute.InstanceManagedByIgmError(), + ], + next_page_token='abc', + ), + compute.RegionInstanceGroupManagersListErrorsResponse( + items=[], + next_page_token='def', + ), + compute.RegionInstanceGroupManagersListErrorsResponse( + items=[ + compute.InstanceManagedByIgmError(), + ], + next_page_token='ghi', + ), + compute.RegionInstanceGroupManagersListErrorsResponse( + items=[ + compute.InstanceManagedByIgmError(), + compute.InstanceManagedByIgmError(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RegionInstanceGroupManagersListErrorsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + pager = client.list_errors(request=sample_request) + + results = list(pager) + 
assert len(results) == 6 + assert all(isinstance(i, compute.InstanceManagedByIgmError) + for i in results) + + pages = list(client.list_errors(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListManagedInstancesRegionInstanceGroupManagersRequest, + dict, +]) +def test_list_managed_instances_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListInstancesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_managed_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListManagedInstancesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_managed_instances_rest_required_fields(request_type=compute.ListManagedInstancesRegionInstanceGroupManagersRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_managed_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_managed_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_managed_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_managed_instances_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_managed_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instanceGroupManager", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_managed_instances_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_list_managed_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_list_managed_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.ListManagedInstancesRegionInstanceGroupManagersRequest.pb(compute.ListManagedInstancesRegionInstanceGroupManagersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionInstanceGroupManagersListInstancesResponse.to_json(compute.RegionInstanceGroupManagersListInstancesResponse()) + + request = compute.ListManagedInstancesRegionInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionInstanceGroupManagersListInstancesResponse() + + client.list_managed_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_managed_instances_rest_bad_request(transport: str = 'rest', request_type=compute.ListManagedInstancesRegionInstanceGroupManagersRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_managed_instances(request) + + +def test_list_managed_instances_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_managed_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listManagedInstances" % client.transport._host, args[1]) + + +def test_list_managed_instances_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_managed_instances( + compute.ListManagedInstancesRegionInstanceGroupManagersRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_list_managed_instances_rest_pager(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionInstanceGroupManagersListInstancesResponse( + managed_instances=[ + compute.ManagedInstance(), + compute.ManagedInstance(), + compute.ManagedInstance(), + ], + next_page_token='abc', + ), + compute.RegionInstanceGroupManagersListInstancesResponse( + managed_instances=[], + next_page_token='def', + ), + compute.RegionInstanceGroupManagersListInstancesResponse( + managed_instances=[ + compute.ManagedInstance(), + ], + next_page_token='ghi', + ), + compute.RegionInstanceGroupManagersListInstancesResponse( + managed_instances=[ + compute.ManagedInstance(), + compute.ManagedInstance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RegionInstanceGroupManagersListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + pager = client.list_managed_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.ManagedInstance) + for i in results) + + pages = list(client.list_managed_instances(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, + dict, +]) +def test_list_per_instance_configs_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a 
request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_per_instance_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPerInstanceConfigsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_per_instance_configs_rest_required_fields(request_type=compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_per_instance_configs(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_per_instance_configs_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instanceGroupManager", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_per_instance_configs_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_list_per_instance_configs") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_list_per_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.pb(compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionInstanceGroupManagersListInstanceConfigsResp.to_json(compute.RegionInstanceGroupManagersListInstanceConfigsResp()) + + request = compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp() + + client.list_per_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_per_instance_configs_rest_bad_request(transport: str = 'rest', request_type=compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_per_instance_configs(request) + + +def test_list_per_instance_configs_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_per_instance_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs" % client.transport._host, args[1]) + + +def test_list_per_instance_configs_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_per_instance_configs( + compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + ) + + +def test_list_per_instance_configs_rest_pager(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionInstanceGroupManagersListInstanceConfigsResp( + items=[ + compute.PerInstanceConfig(), + compute.PerInstanceConfig(), + compute.PerInstanceConfig(), + ], + next_page_token='abc', + ), + compute.RegionInstanceGroupManagersListInstanceConfigsResp( + items=[], + next_page_token='def', + ), + compute.RegionInstanceGroupManagersListInstanceConfigsResp( + items=[ + compute.PerInstanceConfig(), + ], + next_page_token='ghi', + ), + compute.RegionInstanceGroupManagersListInstanceConfigsResp( + items=[ + compute.PerInstanceConfig(), + compute.PerInstanceConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RegionInstanceGroupManagersListInstanceConfigsResp.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + pager = client.list_per_instance_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.PerInstanceConfig) + for i in results) + + pages = list(client.list_per_instance_configs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionInstanceGroupManagerRequest, + dict, +]) +def test_patch_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = 
{'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 
'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionInstanceGroupManagerRequest.pb(compute.PatchRegionInstanceGroupManagerRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 
'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + + +def test_patch_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionInstanceGroupManagerRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 
'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "instanceGroupManagerResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionInstanceGroupManagerRequest.pb(compute.PatchRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["instance_group_manager_resource"] = {'auto_healing_policies': [{'health_check': 'health_check_value', 'initial_delay_sec': 1778}], 'base_instance_name': 'base_instance_name_value', 'creation_timestamp': 'creation_timestamp_value', 'current_actions': {'abandoning': 1041, 'creating': 845, 'creating_without_retries': 2589, 'deleting': 844, 'none': 432, 'recreating': 1060, 'refreshing': 1069, 'restarting': 
1091, 'resuming': 874, 'starting': 876, 'stopping': 884, 'suspending': 1088, 'verifying': 979}, 'description': 'description_value', 'distribution_policy': {'target_shape': 'target_shape_value', 'zones': [{'zone': 'zone_value'}]}, 'fingerprint': 'fingerprint_value', 'id': 205, 'instance_group': 'instance_group_value', 'instance_lifecycle_policy': {'force_update_on_repair': 'force_update_on_repair_value'}, 'instance_template': 'instance_template_value', 'kind': 'kind_value', 'list_managed_instances_results': 'list_managed_instances_results_value', 'name': 'name_value', 'named_ports': [{'name': 'name_value', 'port': 453}], 'region': 'region_value', 'self_link': 'self_link_value', 'stateful_policy': {'preserved_state': {'disks': {}}}, 'status': {'autoscaler': 'autoscaler_value', 'is_stable': True, 'stateful': {'has_stateful_config': True, 'per_instance_configs': {'all_effective': True}}, 'version_target': {'is_reached': True}}, 'target_pools': ['target_pools_value1', 'target_pools_value2'], 'target_size': 1185, 'update_policy': {'instance_redistribution_type': 'instance_redistribution_type_value', 'max_surge': {'calculated': 1042, 'fixed': 528, 'percent': 753}, 'max_unavailable': {}, 'minimal_action': 'minimal_action_value', 'most_disruptive_allowed_action': 'most_disruptive_allowed_action_value', 'replacement_method': 'replacement_method_value', 'type_': 'type__value'}, 'versions': [{'instance_template': 'instance_template_value', 'name': 'name_value', 'target_size': {}}], 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + instance_group_manager_resource=compute.InstanceGroupManager(auto_healing_policies=[compute.InstanceGroupManagerAutoHealingPolicy(health_check='health_check_value')]), + ) + + +def test_patch_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, + dict, +]) +def test_patch_per_instance_configs_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_patch_instance_config_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_per_instance_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_per_instance_configs_rest_required_fields(request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # 
verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_per_instance_configs(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_per_instance_configs_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagerPatchInstanceConfigReqResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_per_instance_configs_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_patch_per_instance_configs") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_patch_per_instance_configs") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.pb(compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_per_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_per_instance_configs_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_patch_instance_config_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_per_instance_configs(request) + + +def test_patch_per_instance_configs_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_patch_instance_config_req_resource=compute.RegionInstanceGroupManagerPatchInstanceConfigReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_per_instance_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs" % client.transport._host, args[1]) + + +def test_patch_per_instance_configs_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_per_instance_configs( + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_patch_instance_config_req_resource=compute.RegionInstanceGroupManagerPatchInstanceConfigReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_patch_per_instance_configs_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, + dict, +]) +def test_patch_per_instance_configs_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_patch_instance_config_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_per_instance_configs_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_per_instance_configs_unary_rest_required_fields(request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_per_instance_configs_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_per_instance_configs_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_per_instance_configs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagerPatchInstanceConfigReqResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_per_instance_configs_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_patch_per_instance_configs") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_patch_per_instance_configs") as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.pb(compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_per_instance_configs_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_per_instance_configs_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_patch_instance_config_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_per_instance_configs_unary(request) + + +def test_patch_per_instance_configs_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_patch_instance_config_req_resource=compute.RegionInstanceGroupManagerPatchInstanceConfigReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_per_instance_configs_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs" % client.transport._host, args[1]) + + +def test_patch_per_instance_configs_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_per_instance_configs_unary( + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_patch_instance_config_req_resource=compute.RegionInstanceGroupManagerPatchInstanceConfigReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_patch_per_instance_configs_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RecreateInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_recreate_instances_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_recreate_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.recreate_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_recreate_instances_rest_required_fields(request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).recreate_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields 
with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).recreate_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.recreate_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_recreate_instances_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.recreate_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersRecreateRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_recreate_instances_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_recreate_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_recreate_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.RecreateInstancesRegionInstanceGroupManagerRequest.pb(compute.RecreateInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RecreateInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.recreate_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_recreate_instances_rest_bad_request(transport: str = 'rest', request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_recreate_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recreate_instances(request) + + +def test_recreate_instances_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_recreate_request_resource=compute.RegionInstanceGroupManagersRecreateRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.recreate_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/recreateInstances" % client.transport._host, args[1]) + + +def test_recreate_instances_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.recreate_instances( + compute.RecreateInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_recreate_request_resource=compute.RegionInstanceGroupManagersRecreateRequest(instances=['instances_value']), + ) + + +def test_recreate_instances_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RecreateInstancesRegionInstanceGroupManagerRequest, + dict, +]) +def test_recreate_instances_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_recreate_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.recreate_instances_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_recreate_instances_unary_rest_required_fields(request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).recreate_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).recreate_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.recreate_instances_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_recreate_instances_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.recreate_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersRecreateRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_recreate_instances_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_recreate_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_recreate_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.RecreateInstancesRegionInstanceGroupManagerRequest.pb(compute.RecreateInstancesRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RecreateInstancesRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.recreate_instances_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_recreate_instances_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_recreate_request_resource"] = {'instances': ['instances_value1', 'instances_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recreate_instances_unary(request) + + +def test_recreate_instances_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_recreate_request_resource=compute.RegionInstanceGroupManagersRecreateRequest(instances=['instances_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.recreate_instances_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/recreateInstances" % client.transport._host, args[1]) + + +def test_recreate_instances_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.recreate_instances_unary( + compute.RecreateInstancesRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_recreate_request_resource=compute.RegionInstanceGroupManagersRecreateRequest(instances=['instances_value']), + ) + + +def test_recreate_instances_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeRegionInstanceGroupManagerRequest, + dict, +]) +def test_resize_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_resize_rest_required_fields(request_type=compute.ResizeRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request_init["size"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "size" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "size" in jsonified_request + assert jsonified_request["size"] == request_init["size"] + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["size"] = 443 + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "size", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "size" in jsonified_request + assert jsonified_request["size"] == 443 + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize(request) + + expected_params = [ + ( + "size", + str(0), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "size", )) & set(("instanceGroupManager", "project", "region", "size", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_resize") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeRegionInstanceGroupManagerRequest.pb(compute.ResizeRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + +def test_resize_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + size=443, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resize" % client.transport._host, args[1]) + + +def test_resize_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize( + compute.ResizeRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + size=443, + ) + + +def test_resize_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeRegionInstanceGroupManagerRequest, + dict, +]) +def test_resize_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_resize_unary_rest_required_fields(request_type=compute.ResizeRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request_init["size"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "size" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "size" in jsonified_request + assert jsonified_request["size"] == request_init["size"] + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["size"] = 443 + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", "size", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "size" in jsonified_request + assert jsonified_request["size"] == 443 + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize_unary(request) + + expected_params = [ + ( + "size", + str(0), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "size", )) & set(("instanceGroupManager", "project", "region", "size", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_resize") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeRegionInstanceGroupManagerRequest.pb(compute.ResizeRegionInstanceGroupManagerRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize_unary(request) + + +def test_resize_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + size=443, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resize" % client.transport._host, args[1]) + + +def test_resize_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize_unary( + compute.ResizeRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + size=443, + ) + + +def test_resize_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, + dict, +]) +def test_set_instance_template_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_set_template_request_resource"] = {'instance_template': 'instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_instance_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_instance_template_rest_required_fields(request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_instance_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_instance_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_instance_template(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_instance_template_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_instance_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersSetTemplateRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_instance_template_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_set_instance_template") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_set_instance_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.pb(compute.SetInstanceTemplateRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetInstanceTemplateRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_instance_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_instance_template_rest_bad_request(transport: str = 'rest', request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_set_template_request_resource"] = {'instance_template': 'instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_instance_template(request) + + +def test_set_instance_template_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_set_template_request_resource=compute.RegionInstanceGroupManagersSetTemplateRequest(instance_template='instance_template_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_instance_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate" % client.transport._host, args[1]) + + +def test_set_instance_template_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_instance_template( + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_set_template_request_resource=compute.RegionInstanceGroupManagersSetTemplateRequest(instance_template='instance_template_value'), + ) + + +def test_set_instance_template_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, + dict, +]) +def test_set_instance_template_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_set_template_request_resource"] = {'instance_template': 'instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_instance_template_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_instance_template_unary_rest_required_fields(request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_instance_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_instance_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_instance_template_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_instance_template_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_instance_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersSetTemplateRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_instance_template_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_set_instance_template") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_set_instance_template") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message 
= compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.pb(compute.SetInstanceTemplateRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetInstanceTemplateRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_instance_template_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_instance_template_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_set_template_request_resource"] = {'instance_template': 'instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_instance_template_unary(request) + + +def test_set_instance_template_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_set_template_request_resource=compute.RegionInstanceGroupManagersSetTemplateRequest(instance_template='instance_template_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_instance_template_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate" % client.transport._host, args[1]) + + +def test_set_instance_template_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_instance_template_unary( + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_set_template_request_resource=compute.RegionInstanceGroupManagersSetTemplateRequest(instance_template='instance_template_value'), + ) + + +def test_set_instance_template_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTargetPoolsRegionInstanceGroupManagerRequest, + dict, +]) +def test_set_target_pools_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_set_target_pools_request_resource"] = {'fingerprint': 'fingerprint_value', 'target_pools': ['target_pools_value1', 'target_pools_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_target_pools(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_target_pools_rest_required_fields(request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_target_pools(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_target_pools_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_target_pools._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersSetTargetPoolsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_target_pools_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_set_target_pools") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_set_target_pools") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetTargetPoolsRegionInstanceGroupManagerRequest.pb(compute.SetTargetPoolsRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTargetPoolsRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_target_pools(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_target_pools_rest_bad_request(transport: str = 'rest', request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_set_target_pools_request_resource"] = {'fingerprint': 'fingerprint_value', 'target_pools': ['target_pools_value1', 'target_pools_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target_pools(request) + + +def test_set_target_pools_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_set_target_pools_request_resource=compute.RegionInstanceGroupManagersSetTargetPoolsRequest(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_target_pools(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setTargetPools" % client.transport._host, args[1]) + + +def test_set_target_pools_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_target_pools( + compute.SetTargetPoolsRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_set_target_pools_request_resource=compute.RegionInstanceGroupManagersSetTargetPoolsRequest(fingerprint='fingerprint_value'), + ) + + +def test_set_target_pools_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetTargetPoolsRegionInstanceGroupManagerRequest, + dict, +]) +def test_set_target_pools_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_set_target_pools_request_resource"] = {'fingerprint': 'fingerprint_value', 'target_pools': ['target_pools_value1', 'target_pools_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_target_pools_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_target_pools_unary_rest_required_fields(request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_target_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_target_pools_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_target_pools_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_target_pools._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagersSetTargetPoolsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_target_pools_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_set_target_pools") as post, \ + mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_set_target_pools") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetTargetPoolsRegionInstanceGroupManagerRequest.pb(compute.SetTargetPoolsRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetTargetPoolsRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_target_pools_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_target_pools_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_managers_set_target_pools_request_resource"] = {'fingerprint': 'fingerprint_value', 'target_pools': ['target_pools_value1', 'target_pools_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target_pools_unary(request) + + +def test_set_target_pools_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_set_target_pools_request_resource=compute.RegionInstanceGroupManagersSetTargetPoolsRequest(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_target_pools_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setTargetPools" % client.transport._host, args[1]) + + +def test_set_target_pools_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_target_pools_unary( + compute.SetTargetPoolsRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_managers_set_target_pools_request_resource=compute.RegionInstanceGroupManagersSetTargetPoolsRequest(fingerprint='fingerprint_value'), + ) + + +def test_set_target_pools_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, + dict, +]) +def test_update_per_instance_configs_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_update_instance_config_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_per_instance_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_per_instance_configs_rest_required_fields(request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # 
verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_per_instance_configs(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_per_instance_configs_rest_unset_required_fields():
+    transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_per_instance_configs._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagerUpdateInstanceConfigReqResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_per_instance_configs_rest_interceptors(null_interceptor):
+    transport = transports.RegionInstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(),
+        )
+    client = RegionInstanceGroupManagersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_update_per_instance_configs") as post, \
+         mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_update_per_instance_configs") as pre:
+        pre.assert_not_called()
+        
post.assert_not_called() + pb_message = compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.pb(compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_per_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_per_instance_configs_rest_bad_request(transport: str = 'rest', request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_update_instance_config_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_per_instance_configs(request) + + +def test_update_per_instance_configs_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_update_instance_config_req_resource=compute.RegionInstanceGroupManagerUpdateInstanceConfigReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_per_instance_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs" % client.transport._host, args[1]) + + +def test_update_per_instance_configs_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_per_instance_configs( + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_update_instance_config_req_resource=compute.RegionInstanceGroupManagerUpdateInstanceConfigReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_update_per_instance_configs_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, + dict, +]) +def test_update_per_instance_configs_unary_rest(request_type): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_update_instance_config_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_per_instance_configs_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_per_instance_configs_unary_rest_required_fields(request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest): + transport_class = transports.RegionInstanceGroupManagersRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_per_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = 'instance_group_manager_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_per_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == 'instance_group_manager_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_per_instance_configs_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_per_instance_configs_unary_rest_unset_required_fields():
+    transport = transports.RegionInstanceGroupManagersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_per_instance_configs._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroupManager", "project", "region", "regionInstanceGroupManagerUpdateInstanceConfigReqResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_per_instance_configs_unary_rest_interceptors(null_interceptor):
+    transport = transports.RegionInstanceGroupManagersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionInstanceGroupManagersRestInterceptor(),
+        )
+    client = RegionInstanceGroupManagersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "post_update_per_instance_configs") as post, \
+         mock.patch.object(transports.RegionInstanceGroupManagersRestInterceptor, "pre_update_per_instance_configs") as pre:
+        pre.assert_not_called()
+ post.assert_not_called() + pb_message = compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.pb(compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_per_instance_configs_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_per_instance_configs_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + request_init["region_instance_group_manager_update_instance_config_req_resource"] = {'per_instance_configs': [{'fingerprint': 'fingerprint_value', 'name': 'name_value', 'preserved_state': {'disks': {}, 'metadata': {}}, 'status': 'status_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_per_instance_configs_unary(request) + + +def test_update_per_instance_configs_unary_rest_flattened(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group_manager': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_update_instance_config_req_resource=compute.RegionInstanceGroupManagerUpdateInstanceConfigReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_per_instance_configs_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs" % client.transport._host, args[1]) + + +def test_update_per_instance_configs_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_per_instance_configs_unary( + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest(), + project='project_value', + region='region_value', + instance_group_manager='instance_group_manager_value', + region_instance_group_manager_update_instance_config_req_resource=compute.RegionInstanceGroupManagerUpdateInstanceConfigReq(per_instance_configs=[compute.PerInstanceConfig(fingerprint='fingerprint_value')]), + ) + + +def test_update_per_instance_configs_unary_rest_error(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupManagersClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstanceGroupManagersClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstanceGroupManagersClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupManagersClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionInstanceGroupManagersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionInstanceGroupManagersClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionInstanceGroupManagersRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionInstanceGroupManagersClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_instance_group_managers_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionInstanceGroupManagersTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_instance_group_managers_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_instance_group_managers.transports.RegionInstanceGroupManagersTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionInstanceGroupManagersTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'abandon_instances', + 'apply_updates_to_instances', + 'create_instances', + 'delete', + 'delete_instances', + 'delete_per_instance_configs', + 'get', + 'insert', + 'list', + 'list_errors', + 'list_managed_instances', + 'list_per_instance_configs', + 'patch', + 'patch_per_instance_configs', + 'recreate_instances', + 'resize', + 'set_instance_template', + 'set_target_pools', + 'update_per_instance_configs', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_instance_group_managers_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_instance_group_managers.transports.RegionInstanceGroupManagersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstanceGroupManagersTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_instance_group_managers_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_instance_group_managers.transports.RegionInstanceGroupManagersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstanceGroupManagersTransport() + adc.assert_called_once() + + +def test_region_instance_group_managers_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionInstanceGroupManagersClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_instance_group_managers_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionInstanceGroupManagersRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instance_group_managers_host_no_port(transport_name): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_region_instance_group_managers_host_with_port(transport_name): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instance_group_managers_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionInstanceGroupManagersClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionInstanceGroupManagersClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.abandon_instances._session + session2 = client2.transport.abandon_instances._session + assert session1 != session2 + session1 = client1.transport.apply_updates_to_instances._session + session2 = client2.transport.apply_updates_to_instances._session + assert session1 != session2 + session1 = client1.transport.create_instances._session + session2 = client2.transport.create_instances._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.delete_instances._session + session2 = client2.transport.delete_instances._session + assert session1 != session2 + session1 = client1.transport.delete_per_instance_configs._session + session2 = client2.transport.delete_per_instance_configs._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = 
client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_errors._session + session2 = client2.transport.list_errors._session + assert session1 != session2 + session1 = client1.transport.list_managed_instances._session + session2 = client2.transport.list_managed_instances._session + assert session1 != session2 + session1 = client1.transport.list_per_instance_configs._session + session2 = client2.transport.list_per_instance_configs._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.patch_per_instance_configs._session + session2 = client2.transport.patch_per_instance_configs._session + assert session1 != session2 + session1 = client1.transport.recreate_instances._session + session2 = client2.transport.recreate_instances._session + assert session1 != session2 + session1 = client1.transport.resize._session + session2 = client2.transport.resize._session + assert session1 != session2 + session1 = client1.transport.set_instance_template._session + session2 = client2.transport.set_instance_template._session + assert session1 != session2 + session1 = client1.transport.set_target_pools._session + session2 = client2.transport.set_target_pools._session + assert session1 != session2 + session1 = client1.transport.update_per_instance_configs._session + session2 = client2.transport.update_per_instance_configs._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionInstanceGroupManagersClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected 
= { + "billing_account": "clam", + } + path = RegionInstanceGroupManagersClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupManagersClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionInstanceGroupManagersClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionInstanceGroupManagersClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupManagersClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionInstanceGroupManagersClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionInstanceGroupManagersClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupManagersClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionInstanceGroupManagersClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionInstanceGroupManagersClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstanceGroupManagersClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionInstanceGroupManagersClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionInstanceGroupManagersClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupManagersClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionInstanceGroupManagersTransport, '_prep_wrapped_messages') as prep: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionInstanceGroupManagersTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionInstanceGroupManagersClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = 
RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionInstanceGroupManagersClient, transports.RegionInstanceGroupManagersRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_groups.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_groups.py new file mode 100644 index 000000000..03e26c8eb --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_groups.py @@ -0,0 +1,2288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_instance_groups import RegionInstanceGroupsClient +from google.cloud.compute_v1.services.region_instance_groups import pagers +from google.cloud.compute_v1.services.region_instance_groups import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" 
+ + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionInstanceGroupsClient._get_default_mtls_endpoint(None) is None + assert RegionInstanceGroupsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionInstanceGroupsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionInstanceGroupsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionInstanceGroupsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionInstanceGroupsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionInstanceGroupsClient, "rest"), +]) +def test_region_instance_groups_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + 
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionInstanceGroupsRestTransport, "rest"), +]) +def test_region_instance_groups_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionInstanceGroupsClient, "rest"), +]) +def test_region_instance_groups_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_instance_groups_client_get_transport_class(): + transport = RegionInstanceGroupsClient.get_transport_class() + available_transports = [ + transports.RegionInstanceGroupsRestTransport, + ] + assert transport in available_transports + + transport = 
RegionInstanceGroupsClient.get_transport_class("rest") + assert transport == transports.RegionInstanceGroupsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionInstanceGroupsClient, transports.RegionInstanceGroupsRestTransport, "rest"), +]) +@mock.patch.object(RegionInstanceGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceGroupsClient)) +def test_region_instance_groups_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionInstanceGroupsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionInstanceGroupsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionInstanceGroupsClient, transports.RegionInstanceGroupsRestTransport, "rest", "true"), + (RegionInstanceGroupsClient, transports.RegionInstanceGroupsRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionInstanceGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceGroupsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_instance_groups_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # 
This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionInstanceGroupsClient +]) +@mock.patch.object(RegionInstanceGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceGroupsClient)) +def test_region_instance_groups_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionInstanceGroupsClient, transports.RegionInstanceGroupsRestTransport, "rest"), +]) +def test_region_instance_groups_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionInstanceGroupsClient, transports.RegionInstanceGroupsRestTransport, "rest", None), +]) +def test_region_instance_groups_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionInstanceGroupRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroup( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + network='network_value', + region='region_value', + self_link='self_link_value', + size=443, + subnetwork='subnetwork_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.InstanceGroup) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.size == 443 + assert response.subnetwork == 'subnetwork_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionInstanceGroupRequest): + transport_class = transports.RegionInstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + 
use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceGroup", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupsRestInterceptor(), + ) + client = RegionInstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionInstanceGroupRequest.pb(compute.GetRegionInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroup.to_json(compute.InstanceGroup()) + + request = compute.GetRegionInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroup() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionInstanceGroupRequest): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group='instance_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionInstanceGroupRequest(), + project='project_value', + region='region_value', + instance_group='instance_group_value', + ) + + +def test_get_rest_error(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionInstanceGroupsRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionInstanceGroupsRequest): + transport_class = transports.RegionInstanceGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.RegionInstanceGroupList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionInstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupsRestInterceptor(), + ) + client = RegionInstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionInstanceGroupsRequest.pb(compute.ListRegionInstanceGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionInstanceGroupList.to_json(compute.RegionInstanceGroupList()) + + request = compute.ListRegionInstanceGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionInstanceGroupList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionInstanceGroupsRequest): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroups" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListRegionInstanceGroupsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionInstanceGroupList( + items=[ + compute.InstanceGroup(), + compute.InstanceGroup(), + compute.InstanceGroup(), + ], + next_page_token='abc', + ), + compute.RegionInstanceGroupList( + items=[], + next_page_token='def', + ), + compute.RegionInstanceGroupList( + items=[ + compute.InstanceGroup(), + ], + next_page_token='ghi', + ), + compute.RegionInstanceGroupList( + items=[ + compute.InstanceGroup(), + compute.InstanceGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RegionInstanceGroupList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceGroup) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + 
compute.ListInstancesRegionInstanceGroupsRequest, + dict, +]) +def test_list_instances_rest(request_type): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + request_init["region_instance_groups_list_instances_request_resource"] = {'instance_state': 'instance_state_value', 'port_name': 'port_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupsListInstances( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_instances_rest_required_fields(request_type=compute.ListInstancesRegionInstanceGroupsRequest): + transport_class = transports.RegionInstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupsListInstances() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("instanceGroup", "project", "region", "regionInstanceGroupsListInstancesRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupsRestInterceptor(), + ) + client = RegionInstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.ListInstancesRegionInstanceGroupsRequest.pb(compute.ListInstancesRegionInstanceGroupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionInstanceGroupsListInstances.to_json(compute.RegionInstanceGroupsListInstances()) + + request = compute.ListInstancesRegionInstanceGroupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionInstanceGroupsListInstances() + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=compute.ListInstancesRegionInstanceGroupsRequest): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + request_init["region_instance_groups_list_instances_request_resource"] = {'instance_state': 'instance_state_value', 'port_name': 'port_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupsListInstances() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group='instance_group_value', + region_instance_groups_list_instances_request_resource=compute.RegionInstanceGroupsListInstancesRequest(instance_state='instance_state_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionInstanceGroupsListInstances.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/listInstances" % client.transport._host, args[1]) + + +def test_list_instances_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + compute.ListInstancesRegionInstanceGroupsRequest(), + project='project_value', + region='region_value', + instance_group='instance_group_value', + region_instance_groups_list_instances_request_resource=compute.RegionInstanceGroupsListInstancesRequest(instance_state='instance_state_value'), + ) + + +def test_list_instances_rest_pager(transport: str = 'rest'): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionInstanceGroupsListInstances( + items=[ + compute.InstanceWithNamedPorts(), + compute.InstanceWithNamedPorts(), + compute.InstanceWithNamedPorts(), + ], + next_page_token='abc', + ), + compute.RegionInstanceGroupsListInstances( + items=[], + next_page_token='def', + ), + compute.RegionInstanceGroupsListInstances( + items=[ + compute.InstanceWithNamedPorts(), + ], + next_page_token='ghi', + ), + compute.RegionInstanceGroupsListInstances( + items=[ + compute.InstanceWithNamedPorts(), + compute.InstanceWithNamedPorts(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RegionInstanceGroupsListInstances.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + sample_request["region_instance_groups_list_instances_request_resource"] = compute.RegionInstanceGroupsListInstancesRequest(instance_state='instance_state_value') + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceWithNamedPorts) + for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetNamedPortsRegionInstanceGroupRequest, + dict, +]) +def test_set_named_ports_rest(request_type): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + request_init["region_instance_groups_set_named_ports_request_resource"] = {'fingerprint': 'fingerprint_value', 'named_ports': [{'name': 'name_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_named_ports(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_named_ports_rest_required_fields(request_type=compute.SetNamedPortsRegionInstanceGroupRequest): + transport_class = transports.RegionInstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_named_ports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now 
present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_named_ports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_named_ports(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_named_ports_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_named_ports._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "project", "region", "regionInstanceGroupsSetNamedPortsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_named_ports_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupsRestInterceptor(), + ) + client = RegionInstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "post_set_named_ports") as post, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "pre_set_named_ports") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetNamedPortsRegionInstanceGroupRequest.pb(compute.SetNamedPortsRegionInstanceGroupRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetNamedPortsRegionInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_named_ports(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_named_ports_rest_bad_request(transport: str = 'rest', request_type=compute.SetNamedPortsRegionInstanceGroupRequest): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + request_init["region_instance_groups_set_named_ports_request_resource"] = {'fingerprint': 'fingerprint_value', 'named_ports': [{'name': 'name_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_named_ports(request) + + +def test_set_named_ports_rest_flattened(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group='instance_group_value', + region_instance_groups_set_named_ports_request_resource=compute.RegionInstanceGroupsSetNamedPortsRequest(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_named_ports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/setNamedPorts" % client.transport._host, args[1]) + + +def test_set_named_ports_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_named_ports( + compute.SetNamedPortsRegionInstanceGroupRequest(), + project='project_value', + region='region_value', + instance_group='instance_group_value', + region_instance_groups_set_named_ports_request_resource=compute.RegionInstanceGroupsSetNamedPortsRequest(fingerprint='fingerprint_value'), + ) + + +def test_set_named_ports_rest_error(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetNamedPortsRegionInstanceGroupRequest, + dict, +]) +def test_set_named_ports_unary_rest(request_type): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + request_init["region_instance_groups_set_named_ports_request_resource"] = {'fingerprint': 'fingerprint_value', 'named_ports': [{'name': 'name_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_named_ports_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_named_ports_unary_rest_required_fields(request_type=compute.SetNamedPortsRegionInstanceGroupRequest): + transport_class = transports.RegionInstanceGroupsRestTransport + + request_init = {} + request_init["instance_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_named_ports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroup"] = 'instance_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_named_ports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroup" in jsonified_request + assert jsonified_request["instanceGroup"] == 'instance_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_named_ports_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_named_ports_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_named_ports._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceGroup", "project", "region", "regionInstanceGroupsSetNamedPortsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_named_ports_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceGroupsRestInterceptor(), + ) + client = 
RegionInstanceGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "post_set_named_ports") as post, \ + mock.patch.object(transports.RegionInstanceGroupsRestInterceptor, "pre_set_named_ports") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetNamedPortsRegionInstanceGroupRequest.pb(compute.SetNamedPortsRegionInstanceGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetNamedPortsRegionInstanceGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_named_ports_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_named_ports_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetNamedPortsRegionInstanceGroupRequest): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + request_init["region_instance_groups_set_named_ports_request_resource"] = {'fingerprint': 'fingerprint_value', 'named_ports': [{'name': 'name_value', 'port': 453}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_named_ports_unary(request) + + +def test_set_named_ports_unary_rest_flattened(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_group='instance_group_value', + region_instance_groups_set_named_ports_request_resource=compute.RegionInstanceGroupsSetNamedPortsRequest(fingerprint='fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_named_ports_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/setNamedPorts" % client.transport._host, args[1]) + + +def test_set_named_ports_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_named_ports_unary( + compute.SetNamedPortsRegionInstanceGroupRequest(), + project='project_value', + region='region_value', + instance_group='instance_group_value', + region_instance_groups_set_named_ports_request_resource=compute.RegionInstanceGroupsSetNamedPortsRequest(fingerprint='fingerprint_value'), + ) + + +def test_set_named_ports_unary_rest_error(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstanceGroupsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstanceGroupsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionInstanceGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionInstanceGroupsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionInstanceGroupsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionInstanceGroupsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_instance_groups_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionInstanceGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_instance_groups_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_instance_groups.transports.RegionInstanceGroupsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionInstanceGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'get', + 'list', + 'list_instances', + 'set_named_ports', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_instance_groups_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_instance_groups.transports.RegionInstanceGroupsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstanceGroupsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_instance_groups_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_instance_groups.transports.RegionInstanceGroupsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstanceGroupsTransport() + adc.assert_called_once() + + +def test_region_instance_groups_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionInstanceGroupsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_instance_groups_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionInstanceGroupsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instance_groups_host_no_port(transport_name): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instance_groups_host_with_port(transport_name): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instance_groups_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionInstanceGroupsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionInstanceGroupsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.set_named_ports._session + session2 = client2.transport.set_named_ports._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionInstanceGroupsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionInstanceGroupsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionInstanceGroupsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionInstanceGroupsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstanceGroupsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionInstanceGroupsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionInstanceGroupsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionInstanceGroupsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionInstanceGroupsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionInstanceGroupsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionInstanceGroupsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstanceGroupsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionInstanceGroupsTransport, '_prep_wrapped_messages') as prep: + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionInstanceGroupsTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionInstanceGroupsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionInstanceGroupsClient, transports.RegionInstanceGroupsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_templates.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_templates.py new file mode 100644 index 000000000..36fbd0cff --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instance_templates.py @@ -0,0 +1,2509 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
from google.protobuf import json_format

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.region_instance_templates import RegionInstanceTemplatesClient
from google.cloud.compute_v1.services.region_instance_templates import pagers
from google.cloud.compute_v1.services.region_instance_templates import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    """Return a dummy ``(cert bytes, key bytes)`` tuple.

    Used as the ``client_cert_source`` callback in the mTLS client tests
    below; the values are placeholders, never parsed as real certificates.
    """
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a non-localhost stand-in for the client's default endpoint.

    If ``client.DEFAULT_ENDPOINT`` contains "localhost" return
    "foo.googleapis.com" so that a distinct mTLS endpoint can be derived
    during endpoint tests; otherwise return the default endpoint unchanged.
    """
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint inserts ``.mtls`` only for googleapis hosts."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # None passes through; already-mtls endpoints are idempotent;
    # non-Google hosts are returned unchanged.
    assert RegionInstanceTemplatesClient._get_default_mtls_endpoint(None) is None
    assert RegionInstanceTemplatesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert RegionInstanceTemplatesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert RegionInstanceTemplatesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert RegionInstanceTemplatesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert RegionInstanceTemplatesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (RegionInstanceTemplatesClient, "rest"),
])
def test_region_instance_templates_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info builds a client holding the factory's creds."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # REST transports carry a scheme-prefixed host; gRPC uses host:port.
        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.RegionInstanceTemplatesRestTransport, "rest"),
])
def test_region_instance_templates_client_service_account_always_use_jwt(transport_class, transport_name):
    """Transport toggles with_always_use_jwt_access per the ctor flag."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (RegionInstanceTemplatesClient, "rest"),
])
def test_region_instance_templates_client_from_service_account_file(client_class, transport_name):
    """Both file-based constructors return clients with the mocked creds."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # from_service_account_json is the alias; behavior must match.
        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_region_instance_templates_client_get_transport_class():
    """get_transport_class returns the REST transport, by default and by name."""
    transport = RegionInstanceTemplatesClient.get_transport_class()
    available_transports = [
        transports.RegionInstanceTemplatesRestTransport,
    ]
    assert transport in available_transports

    transport = RegionInstanceTemplatesClient.get_transport_class("rest")
    assert transport == transports.RegionInstanceTemplatesRestTransport
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionInstanceTemplatesClient, transports.RegionInstanceTemplatesRestTransport, "rest"), +]) +@mock.patch.object(RegionInstanceTemplatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceTemplatesClient)) +def test_region_instance_templates_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionInstanceTemplatesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionInstanceTemplatesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionInstanceTemplatesClient, transports.RegionInstanceTemplatesRestTransport, "rest", "true"), + (RegionInstanceTemplatesClient, transports.RegionInstanceTemplatesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionInstanceTemplatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceTemplatesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_instance_templates_client_mtls_env_auto(client_class, transport_class, transport_name, 
use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionInstanceTemplatesClient +]) +@mock.patch.object(RegionInstanceTemplatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstanceTemplatesClient)) +def test_region_instance_templates_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionInstanceTemplatesClient, transports.RegionInstanceTemplatesRestTransport, "rest"), +]) +def test_region_instance_templates_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionInstanceTemplatesClient, transports.RegionInstanceTemplatesRestTransport, "rest", None), +]) +def test_region_instance_templates_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionInstanceTemplateRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionInstanceTemplateRequest): + transport_class = transports.RegionInstanceTemplatesRestTransport + + request_init = {} + request_init["instance_template"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceTemplate"] = 'instance_template_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceTemplate" in jsonified_request + assert jsonified_request["instanceTemplate"] == 'instance_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionInstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceTemplate", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceTemplatesRestInterceptor(), + ) + client = RegionInstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionInstanceTemplateRequest.pb(compute.DeleteRegionInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionInstanceTemplateRequest): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_template='instance_template_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceTemplates/{instance_template}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionInstanceTemplateRequest(), + project='project_value', + region='region_value', + instance_template='instance_template_value', + ) + + +def test_delete_rest_error(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionInstanceTemplateRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionInstanceTemplateRequest): + transport_class = transports.RegionInstanceTemplatesRestTransport + + request_init = {} + request_init["instance_template"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceTemplate"] = 'instance_template_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceTemplate" in jsonified_request + assert jsonified_request["instanceTemplate"] == 'instance_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceTemplate", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceTemplatesRestInterceptor(), + ) + client = RegionInstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionInstanceTemplateRequest.pb(compute.DeleteRegionInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionInstanceTemplateRequest): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_template='instance_template_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceTemplates/{instance_template}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionInstanceTemplateRequest(), + project='project_value', + region='region_value', + instance_template='instance_template_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionInstanceTemplateRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceTemplate( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + source_instance='source_instance_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.InstanceTemplate) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.source_instance == 'source_instance_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionInstanceTemplateRequest): + transport_class = transports.RegionInstanceTemplatesRestTransport + + request_init = {} + request_init["instance_template"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["instanceTemplate"] = 'instance_template_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceTemplate" in jsonified_request + assert jsonified_request["instanceTemplate"] == 'instance_template_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionInstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceTemplate", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceTemplatesRestInterceptor(), + ) + client = RegionInstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionInstanceTemplateRequest.pb(compute.GetRegionInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceTemplate.to_json(compute.InstanceTemplate()) + + request = compute.GetRegionInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceTemplate() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionInstanceTemplateRequest): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'instance_template': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_template='instance_template_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceTemplates/{instance_template}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionInstanceTemplateRequest(), + project='project_value', + region='region_value', + instance_template='instance_template_value', + ) + + +def test_get_rest_error(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionInstanceTemplateRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["instance_template_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 
'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 
'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'region': 'region_value', 'self_link': 'self_link_value', 'source_instance': 'source_instance_value', 'source_instance_params': {'disk_configs': [{'auto_delete': True, 'custom_image': 'custom_image_value', 'device_name': 'device_name_value', 'instantiate_from': 'instantiate_from_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionInstanceTemplateRequest): + transport_class = transports.RegionInstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionInstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceTemplateResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceTemplatesRestInterceptor(), + ) + client = RegionInstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionInstanceTemplateRequest.pb(compute.InsertRegionInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionInstanceTemplateRequest): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["instance_template_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 
'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 
'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'region': 'region_value', 'self_link': 'self_link_value', 'source_instance': 'source_instance_value', 'source_instance_params': {'disk_configs': [{'auto_delete': True, 'custom_image': 'custom_image_value', 'device_name': 'device_name_value', 'instantiate_from': 'instantiate_from_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_template_resource=compute.InstanceTemplate(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceTemplates" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertRegionInstanceTemplateRequest(), + project='project_value', + region='region_value', + instance_template_resource=compute.InstanceTemplate(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionInstanceTemplateRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["instance_template_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 
'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 
'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'region': 'region_value', 'self_link': 'self_link_value', 'source_instance': 'source_instance_value', 'source_instance_params': {'disk_configs': [{'auto_delete': True, 'custom_image': 
'custom_image_value', 'device_name': 'device_name_value', 'instantiate_from': 'instantiate_from_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionInstanceTemplateRequest): + transport_class = transports.RegionInstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("instanceTemplateResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceTemplatesRestInterceptor(), + ) + client = RegionInstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionInstanceTemplateRequest.pb(compute.InsertRegionInstanceTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionInstanceTemplateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionInstanceTemplateRequest): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["instance_template_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 
'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 
'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'region': 'region_value', 'self_link': 'self_link_value', 'source_instance': 'source_instance_value', 
'source_instance_params': {'disk_configs': [{'auto_delete': True, 'custom_image': 'custom_image_value', 'device_name': 'device_name_value', 'instantiate_from': 'instantiate_from_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + instance_template_resource=compute.InstanceTemplate(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceTemplates" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionInstanceTemplateRequest(), + project='project_value', + region='region_value', + instance_template_resource=compute.InstanceTemplate(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionInstanceTemplatesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceTemplateList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionInstanceTemplatesRequest): + transport_class = transports.RegionInstanceTemplatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceTemplateList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionInstanceTemplatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstanceTemplatesRestInterceptor(), + ) + client = RegionInstanceTemplatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionInstanceTemplatesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionInstanceTemplatesRequest.pb(compute.ListRegionInstanceTemplatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceTemplateList.to_json(compute.InstanceTemplateList()) + + request = compute.ListRegionInstanceTemplatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceTemplateList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionInstanceTemplatesRequest): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceTemplateList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.InstanceTemplateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instanceTemplates" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionInstanceTemplatesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceTemplateList( + items=[ + compute.InstanceTemplate(), + compute.InstanceTemplate(), + compute.InstanceTemplate(), + ], + next_page_token='abc', + ), + compute.InstanceTemplateList( + items=[], + next_page_token='def', + ), + compute.InstanceTemplateList( + items=[ + compute.InstanceTemplate(), + ], + next_page_token='ghi', + ), + compute.InstanceTemplateList( + items=[ + compute.InstanceTemplate(), + compute.InstanceTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.InstanceTemplateList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstanceTemplate) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceTemplatesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstanceTemplatesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstanceTemplatesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceTemplatesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionInstanceTemplatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionInstanceTemplatesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionInstanceTemplatesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionInstanceTemplatesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_instance_templates_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionInstanceTemplatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_instance_templates_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_instance_templates.transports.RegionInstanceTemplatesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionInstanceTemplatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_instance_templates_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_instance_templates.transports.RegionInstanceTemplatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstanceTemplatesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_instance_templates_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_instance_templates.transports.RegionInstanceTemplatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstanceTemplatesTransport() + adc.assert_called_once() + + +def test_region_instance_templates_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionInstanceTemplatesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_instance_templates_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionInstanceTemplatesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instance_templates_host_no_port(transport_name): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instance_templates_host_with_port(transport_name): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_region_instance_templates_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionInstanceTemplatesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionInstanceTemplatesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionInstanceTemplatesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionInstanceTemplatesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceTemplatesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionInstanceTemplatesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionInstanceTemplatesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstanceTemplatesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionInstanceTemplatesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionInstanceTemplatesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceTemplatesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionInstanceTemplatesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionInstanceTemplatesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceTemplatesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionInstanceTemplatesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionInstanceTemplatesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstanceTemplatesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionInstanceTemplatesTransport, '_prep_wrapped_messages') as prep: + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionInstanceTemplatesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionInstanceTemplatesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionInstanceTemplatesClient, transports.RegionInstanceTemplatesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instances.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instances.py new file mode 100644 index 000000000..dd03c7dd4 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_instances.py @@ -0,0 +1,1383 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_instances import RegionInstancesClient +from google.cloud.compute_v1.services.region_instances import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionInstancesClient._get_default_mtls_endpoint(None) is None + assert RegionInstancesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionInstancesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionInstancesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionInstancesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionInstancesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionInstancesClient, "rest"), +]) +def test_region_instances_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionInstancesRestTransport, "rest"), +]) +def test_region_instances_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionInstancesClient, "rest"), +]) +def test_region_instances_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_instances_client_get_transport_class(): + transport = RegionInstancesClient.get_transport_class() + available_transports = [ + transports.RegionInstancesRestTransport, + ] + assert transport in available_transports + + transport = RegionInstancesClient.get_transport_class("rest") + assert transport == transports.RegionInstancesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionInstancesClient, transports.RegionInstancesRestTransport, "rest"), +]) +@mock.patch.object(RegionInstancesClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstancesClient)) +def test_region_instances_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionInstancesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionInstancesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionInstancesClient, transports.RegionInstancesRestTransport, "rest", "true"), + (RegionInstancesClient, transports.RegionInstancesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionInstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstancesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_instances_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionInstancesClient +]) +@mock.patch.object(RegionInstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionInstancesClient)) +def test_region_instances_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionInstancesClient, transports.RegionInstancesRestTransport, "rest"), +]) +def test_region_instances_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionInstancesClient, transports.RegionInstancesRestTransport, "rest", None), +]) +def test_region_instances_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.BulkInsertRegionInstanceRequest, + dict, +]) +def test_bulk_insert_rest(request_type): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["bulk_insert_instance_resource_resource"] = {'count': 553, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 
'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 
'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': 
['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'location_policy': {'locations': {}, 'target_shape': 'target_shape_value'}, 'min_count': 972, 'name_pattern': 'name_pattern_value', 'per_instance_properties': {}, 'source_instance_template': 'source_instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.bulk_insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_bulk_insert_rest_required_fields(request_type=compute.BulkInsertRegionInstanceRequest): + transport_class = transports.RegionInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.bulk_insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_bulk_insert_rest_unset_required_fields(): + transport = transports.RegionInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.bulk_insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("bulkInsertInstanceResourceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_insert_rest_interceptors(null_interceptor): + transport = transports.RegionInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstancesRestInterceptor(), + ) + client = RegionInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionInstancesRestInterceptor, "post_bulk_insert") as post, \ + mock.patch.object(transports.RegionInstancesRestInterceptor, "pre_bulk_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.BulkInsertRegionInstanceRequest.pb(compute.BulkInsertRegionInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.BulkInsertRegionInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.bulk_insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_bulk_insert_rest_bad_request(transport: str = 'rest', request_type=compute.BulkInsertRegionInstanceRequest): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["bulk_insert_instance_resource_resource"] = {'count': 553, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 
'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 
'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'location_policy': {'locations': {}, 'target_shape': 'target_shape_value'}, 'min_count': 972, 'name_pattern': 'name_pattern_value', 'per_instance_properties': {}, 'source_instance_template': 'source_instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.bulk_insert(request) + + +def test_bulk_insert_rest_flattened(): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(count=553), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.bulk_insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instances/bulkInsert" % client.transport._host, args[1]) + + +def test_bulk_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.bulk_insert( + compute.BulkInsertRegionInstanceRequest(), + project='project_value', + region='region_value', + bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(count=553), + ) + + +def test_bulk_insert_rest_error(): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.BulkInsertRegionInstanceRequest, + dict, +]) +def test_bulk_insert_unary_rest(request_type): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["bulk_insert_instance_resource_resource"] = {'count': 553, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 
'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 
'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'location_policy': {'locations': {}, 'target_shape': 'target_shape_value'}, 'min_count': 972, 'name_pattern': 'name_pattern_value', 'per_instance_properties': {}, 'source_instance_template': 'source_instance_template_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.bulk_insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_bulk_insert_unary_rest_required_fields(request_type=compute.BulkInsertRegionInstanceRequest): + transport_class = transports.RegionInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).bulk_insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.bulk_insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_bulk_insert_unary_rest_unset_required_fields(): + transport = transports.RegionInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.bulk_insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("bulkInsertInstanceResourceResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionInstancesRestInterceptor(), + ) + client = RegionInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionInstancesRestInterceptor, "post_bulk_insert") as post, \ + mock.patch.object(transports.RegionInstancesRestInterceptor, "pre_bulk_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.BulkInsertRegionInstanceRequest.pb(compute.BulkInsertRegionInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.BulkInsertRegionInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.bulk_insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_bulk_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.BulkInsertRegionInstanceRequest): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["bulk_insert_instance_resource_resource"] = {'count': 553, 'instance_properties': {'advanced_machine_features': {'enable_nested_virtualization': True, 'enable_uefi_networking': True, 'threads_per_core': 1689, 'visible_core_count': 1918}, 'can_ip_forward': True, 'confidential_instance_config': {'enable_confidential_compute': True}, 'description': 'description_value', 'disks': [{'architecture': 'architecture_value', 'auto_delete': True, 'boot': True, 'device_name': 'device_name_value', 'disk_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 
'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'disk_size_gb': 1261, 'force_attach': True, 'guest_os_features': [{'type_': 'type__value'}], 'index': 536, 'initialize_params': {'architecture': 'architecture_value', 'description': 'description_value', 'disk_name': 'disk_name_value', 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'labels': {}, 'licenses': ['licenses_value1', 'licenses_value2'], 'on_update_action': 'on_update_action_value', 'provisioned_iops': 1740, 'provisioned_throughput': 2411, 'replica_zones': ['replica_zones_value1', 'replica_zones_value2'], 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'source_image': 'source_image_value', 'source_image_encryption_key': {}, 'source_snapshot': 'source_snapshot_value', 'source_snapshot_encryption_key': {}}, 'interface': 'interface_value', 'kind': 'kind_value', 'licenses': ['licenses_value1', 'licenses_value2'], 'mode': 'mode_value', 'saved_state': 'saved_state_value', 'shielded_instance_initial_state': {'dbs': [{'content': 'content_value', 'file_type': 'file_type_value'}], 'dbxs': {}, 'keks': {}, 'pk': {}}, 'source': 'source_value', 'type_': 'type__value'}], 'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'key_revocation_action_type': 'key_revocation_action_type_value', 'labels': {}, 'machine_type': 'machine_type_value', 'metadata': {'fingerprint': 'fingerprint_value', 'items': [{'key': 'key_value', 'value': 'value_value'}], 'kind': 'kind_value'}, 'min_cpu_platform': 'min_cpu_platform_value', 'network_interfaces': [{'access_configs': [{'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'kind': 'kind_value', 'name': 'name_value', 'nat_i_p': 'nat_i_p_value', 'network_tier': 'network_tier_value', 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'set_public_ptr': True, 'type_': 'type__value'}], 'alias_ip_ranges': 
[{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'fingerprint': 'fingerprint_value', 'internal_ipv6_prefix_length': 2831, 'ipv6_access_configs': {}, 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_address': 'ipv6_address_value', 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_attachment': 'network_attachment_value', 'network_i_p': 'network_i_p_value', 'nic_type': 'nic_type_value', 'queue_count': 1197, 'stack_type': 'stack_type_value', 'subnetwork': 'subnetwork_value'}], 'network_performance_config': {'total_egress_bandwidth_tier': 'total_egress_bandwidth_tier_value'}, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'reservation_affinity': {'consume_reservation_type': 'consume_reservation_type_value', 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'resource_manager_tags': {}, 'resource_policies': ['resource_policies_value1', 'resource_policies_value2'], 'scheduling': {'automatic_restart': True, 'instance_termination_action': 'instance_termination_action_value', 'local_ssd_recovery_timeout': {'nanos': 543, 'seconds': 751}, 'location_hint': 'location_hint_value', 'min_node_cpus': 1379, 'node_affinities': [{'key': 'key_value', 'operator': 'operator_value', 'values': ['values_value1', 'values_value2']}], 'on_host_maintenance': 'on_host_maintenance_value', 'preemptible': True, 'provisioning_model': 'provisioning_model_value'}, 'service_accounts': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'shielded_instance_config': {'enable_integrity_monitoring': True, 'enable_secure_boot': True, 'enable_vtpm': True}, 'tags': {'fingerprint': 'fingerprint_value', 'items': ['items_value1', 'items_value2']}}, 'location_policy': {'locations': {}, 'target_shape': 'target_shape_value'}, 'min_count': 972, 'name_pattern': 'name_pattern_value', 'per_instance_properties': {}, 'source_instance_template': 'source_instance_template_value'} + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.bulk_insert_unary(request) + + +def test_bulk_insert_unary_rest_flattened(): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(count=553), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.bulk_insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/instances/bulkInsert" % client.transport._host, args[1]) + + +def test_bulk_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.bulk_insert_unary( + compute.BulkInsertRegionInstanceRequest(), + project='project_value', + region='region_value', + bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(count=553), + ) + + +def test_bulk_insert_unary_rest_error(): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstancesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RegionInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstancesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstancesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstancesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionInstancesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionInstancesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionInstancesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_instances_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionInstancesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_instances_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_instances.transports.RegionInstancesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionInstancesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'bulk_insert', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_instances_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_instances.transports.RegionInstancesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstancesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_instances_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_instances.transports.RegionInstancesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstancesTransport() + adc.assert_called_once() + + +def test_region_instances_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionInstancesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_instances_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionInstancesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instances_host_no_port(transport_name): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instances_host_with_port(transport_name): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_instances_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 
= ga_credentials.AnonymousCredentials() + client1 = RegionInstancesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionInstancesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.bulk_insert._session + session2 = client2.transport.bulk_insert._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionInstancesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionInstancesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstancesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionInstancesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionInstancesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstancesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionInstancesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionInstancesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstancesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionInstancesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionInstancesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstancesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionInstancesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionInstancesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstancesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionInstancesTransport, '_prep_wrapped_messages') as prep: + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionInstancesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionInstancesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionInstancesClient, transports.RegionInstancesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py new file mode 100644 index 000000000..f4ba6ca2d --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py @@ -0,0 +1,2521 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_network_endpoint_groups import RegionNetworkEndpointGroupsClient +from google.cloud.compute_v1.services.region_network_endpoint_groups import pagers +from google.cloud.compute_v1.services.region_network_endpoint_groups import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionNetworkEndpointGroupsClient._get_default_mtls_endpoint(None) is None + assert RegionNetworkEndpointGroupsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionNetworkEndpointGroupsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionNetworkEndpointGroupsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionNetworkEndpointGroupsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionNetworkEndpointGroupsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionNetworkEndpointGroupsClient, "rest"), +]) +def test_region_network_endpoint_groups_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionNetworkEndpointGroupsRestTransport, "rest"), +]) +def 
test_region_network_endpoint_groups_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionNetworkEndpointGroupsClient, "rest"), +]) +def test_region_network_endpoint_groups_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_network_endpoint_groups_client_get_transport_class(): + transport = RegionNetworkEndpointGroupsClient.get_transport_class() + available_transports = [ + transports.RegionNetworkEndpointGroupsRestTransport, + ] + assert transport in available_transports + + transport = RegionNetworkEndpointGroupsClient.get_transport_class("rest") + assert transport == 
transports.RegionNetworkEndpointGroupsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionNetworkEndpointGroupsClient, transports.RegionNetworkEndpointGroupsRestTransport, "rest"), +]) +@mock.patch.object(RegionNetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNetworkEndpointGroupsClient)) +def test_region_network_endpoint_groups_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionNetworkEndpointGroupsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionNetworkEndpointGroupsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionNetworkEndpointGroupsClient, transports.RegionNetworkEndpointGroupsRestTransport, "rest", "true"), + (RegionNetworkEndpointGroupsClient, transports.RegionNetworkEndpointGroupsRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionNetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNetworkEndpointGroupsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_network_endpoint_groups_client_mtls_env_auto(client_class, 
transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionNetworkEndpointGroupsClient +]) +@mock.patch.object(RegionNetworkEndpointGroupsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNetworkEndpointGroupsClient)) +def test_region_network_endpoint_groups_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionNetworkEndpointGroupsClient, transports.RegionNetworkEndpointGroupsRestTransport, "rest"), +]) +def test_region_network_endpoint_groups_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionNetworkEndpointGroupsClient, transports.RegionNetworkEndpointGroupsRestTransport, "rest", None), +]) +def test_region_network_endpoint_groups_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionNetworkEndpointGroupRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionNetworkEndpointGroupRequest): + transport_class = transports.RegionNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present 
+ + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkEndpointGroupsRestInterceptor(), + ) + client = RegionNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionNetworkEndpointGroupRequest.pb(compute.DeleteRegionNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionNetworkEndpointGroupRequest): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_endpoint_group='network_endpoint_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionNetworkEndpointGroupRequest(), + project='project_value', + region='region_value', + network_endpoint_group='network_endpoint_group_value', + ) + + +def test_delete_rest_error(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionNetworkEndpointGroupRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionNetworkEndpointGroupRequest): + transport_class = transports.RegionNetworkEndpointGroupsRestTransport + + request_init = {} + request_init["network_endpoint_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "networkEndpointGroup" in jsonified_request + assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroup", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkEndpointGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkEndpointGroupsRestInterceptor(), + ) + client = RegionNetworkEndpointGroupsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as 
req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionNetworkEndpointGroupRequest.pb(compute.DeleteRegionNetworkEndpointGroupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionNetworkEndpointGroupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionNetworkEndpointGroupRequest): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network_endpoint_group='network_endpoint_group_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionNetworkEndpointGroupRequest(), + project='project_value', + region='region_value', + network_endpoint_group='network_endpoint_group_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionNetworkEndpointGroupRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
        # (tail of test_get_rest) Designate the response the mocked HTTP call returns.
        return_value = compute.NetworkEndpointGroup(
            creation_timestamp='creation_timestamp_value',
            default_port=1289,
            description='description_value',
            id=205,
            kind='kind_value',
            name='name_value',
            network='network_value',
            network_endpoint_type='network_endpoint_type_value',
            psc_target_service='psc_target_service_value',
            region='region_value',
            self_link='self_link_value',
            size=443,
            subnetwork='subnetwork_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkEndpointGroup.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.NetworkEndpointGroup)
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.default_port == 1289
    assert response.description == 'description_value'
    assert response.id == 205
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.network == 'network_value'
    assert response.network_endpoint_type == 'network_endpoint_type_value'
    assert response.psc_target_service == 'psc_target_service_value'
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.size == 443
    assert response.subnetwork == 'subnetwork_value'
    assert response.zone == 'zone_value'


def test_get_rest_required_fields(request_type=compute.GetRegionNetworkEndpointGroupRequest):
    """Verify required-field handling of get() on the REST transport.

    Confirms empty defaults are dropped from the transcoded JSON, that
    explicitly set required fields survive, and that the mocked request
    carries no unexpected query params.
    """
    transport_class = transports.RegionNetworkEndpointGroupsRestTransport

    request_init = {}
    request_init["network_endpoint_group"] = ""
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["networkEndpointGroup"] = 'network_endpoint_group_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "networkEndpointGroup" in jsonified_request
    assert jsonified_request["networkEndpointGroup"] == 'network_endpoint_group_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.NetworkEndpointGroup()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.NetworkEndpointGroup.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_rest_unset_required_fields():
    """Check the set of fields get() reports as required-but-unset."""
    # NOTE(review): `credentials=ga_credentials.AnonymousCredentials` passes the
    # class, not an instance — harmless here since the credentials are never
    # exercised, but presumably a generator quirk; confirm upstream.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("networkEndpointGroup", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rest_interceptors(null_interceptor):
    """Ensure the pre_get/post_get interceptor hooks fire exactly once around get()."""
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionNetworkEndpointGroupsRestInterceptor(),
    )
    client = RegionNetworkEndpointGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "post_get") as post, \
         mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_get") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.GetRegionNetworkEndpointGroupRequest.pb(compute.GetRegionNetworkEndpointGroupRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }
        # (tail of test_get_rest_interceptors) Fake a successful HTTP exchange.
        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.NetworkEndpointGroup.to_json(compute.NetworkEndpointGroup())

        request = compute.GetRegionNetworkEndpointGroupRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.NetworkEndpointGroup()

        client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionNetworkEndpointGroupRequest):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get(request)


def test_get_rest_flattened():
    """Flattened-argument form of get() expands into the expected resource URL."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NetworkEndpointGroup()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'network_endpoint_group': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            network_endpoint_group='network_endpoint_group_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkEndpointGroup.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" % client.transport._host, args[1])


def test_get_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get(
            compute.GetRegionNetworkEndpointGroupRequest(),
            project='project_value',
            region='region_value',
            network_endpoint_group='network_endpoint_group_value',
        )


def test_get_rest_error():
    """Smoke test: client construction with the REST transport succeeds."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertRegionNetworkEndpointGroupRequest,
    dict,
])
def test_insert_rest(request_type):
    """insert() wraps the mocked compute.Operation in an ExtendedOperation."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        # (tail of test_insert_rest) Fully-populated Operation the mock returns.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_insert_rest_required_fields(request_type=compute.InsertRegionNetworkEndpointGroupRequest):
    """Verify required-field handling of insert() on the REST transport.

    Confirms empty defaults are dropped from the transcoded JSON, that the
    only optional query param is request_id, and that explicitly set required
    fields survive the round trip.
    """
    transport_class = transports.RegionNetworkEndpointGroupsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            # (tail of test_insert_rest_required_fields) Stub transcode output.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_rest_unset_required_fields():
    """Check the set of fields insert() reports as required-but-unset."""
    # NOTE(review): credentials class (not instance) is passed — see the get
    # variant of this test; presumably a generator quirk.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroupResource", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_rest_interceptors(null_interceptor):
    """Ensure the pre_insert/post_insert interceptor hooks fire exactly once around insert()."""
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionNetworkEndpointGroupsRestInterceptor(),
    )
    client = RegionNetworkEndpointGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "post_insert") as post, \
         mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertRegionNetworkEndpointGroupRequest.pb(compute.InsertRegionNetworkEndpointGroupRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertRegionNetworkEndpointGroupRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionNetworkEndpointGroupRequest):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert(request)


def test_insert_rest_flattened():
    """Flattened-argument form of insert() expands into the expected resource URL."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        # (tail of test_insert_rest_flattened) Verify the single call hit the expected URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups" % client.transport._host, args[1])


def test_insert_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert(
            compute.InsertRegionNetworkEndpointGroupRequest(),
            project='project_value',
            region='region_value',
            network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}),
        )


def test_insert_rest_error():
    """Smoke test: client construction with the REST transport succeeds."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertRegionNetworkEndpointGroupRequest,
    dict,
])
def test_insert_unary_rest(request_type):
    """insert_unary() returns the raw compute.Operation (no polling wrapper)."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionNetworkEndpointGroupRequest):
    """Verify required-field handling of insert_unary() on the REST transport.

    Mirrors test_insert_rest_required_fields but exercises the unary surface.
    """
    transport_class = transports.RegionNetworkEndpointGroupsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    # (tail of test_insert_unary_rest_required_fields) Drive the mocked request.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert_unary(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_unary_rest_unset_required_fields():
    """Check the set of fields insert (unary surface) reports as required-but-unset."""
    # NOTE(review): inspects `transport.insert`, not an `insert_unary` attribute —
    # the unary method shares the insert transport method; confirm against the
    # transport implementation.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("networkEndpointGroupResource", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_unary_rest_interceptors(null_interceptor):
    """Ensure pre_insert/post_insert interceptor hooks fire exactly once around insert_unary()."""
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionNetworkEndpointGroupsRestInterceptor(),
    )
    client = RegionNetworkEndpointGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "post_insert") as post, \
         mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertRegionNetworkEndpointGroupRequest.pb(compute.InsertRegionNetworkEndpointGroupRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertRegionNetworkEndpointGroupRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionNetworkEndpointGroupRequest):
    """A 400 from the transport must surface as core_exceptions.BadRequest."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["network_endpoint_group_resource"] = {'annotations': {}, 'app_engine': {'service': 'service_value', 'url_mask': 'url_mask_value', 'version': 'version_value'}, 'cloud_function': {'function': 'function_value', 'url_mask': 'url_mask_value'}, 'cloud_run': {'service': 'service_value', 'tag': 'tag_value', 'url_mask': 'url_mask_value'}, 'creation_timestamp': 'creation_timestamp_value', 'default_port': 1289, 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'network_endpoint_type': 'network_endpoint_type_value', 'psc_data': {'consumer_psc_address': 'consumer_psc_address_value', 'psc_connection_id': 1793, 'psc_connection_status': 'psc_connection_status_value'}, 'psc_target_service': 'psc_target_service_value', 'region': 'region_value', 'self_link': 'self_link_value', 'size': 443, 'subnetwork': 'subnetwork_value', 'zone': 'zone_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request)


def test_insert_unary_rest_flattened():
    """Flattened-argument form of insert_unary() expands into the expected resource URL."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        # (tail of test_insert_unary_rest_flattened) Response for the mocked call.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups" % client.transport._host, args[1])


def test_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert_unary(
            compute.InsertRegionNetworkEndpointGroupRequest(),
            project='project_value',
            region='region_value',
            network_endpoint_group_resource=compute.NetworkEndpointGroup(annotations={'key_value': 'value_value'}),
        )


def test_insert_unary_rest_error():
    """Smoke test: client construction with the REST transport succeeds."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.ListRegionNetworkEndpointGroupsRequest,
    dict,
])
def test_list_rest(request_type):
    """list() wraps the mocked NetworkEndpointGroupList in a ListPager."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NetworkEndpointGroupList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkEndpointGroupList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
    # (tail of test_list_rest) The pager proxies the list response's attributes.
    assert isinstance(response, pagers.ListPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'


def test_list_rest_required_fields(request_type=compute.ListRegionNetworkEndpointGroupsRequest):
    """Verify required-field handling of list() on the REST transport.

    Confirms empty defaults are dropped from the transcoded JSON, that the
    optional query params are exactly the paging/filter fields, and that
    explicitly set required fields survive the round trip.
    """
    transport_class = transports.RegionNetworkEndpointGroupsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.NetworkEndpointGroupList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.NetworkEndpointGroupList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_rest_unset_required_fields():
    """Check the set of fields list() reports as required-but-unset."""
    # NOTE(review): credentials class (not instance) is passed — see the get
    # variant of this test; presumably a generator quirk.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
    """Ensure the pre_list/post_list interceptor hooks fire exactly once around list()."""
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionNetworkEndpointGroupsRestInterceptor(),
    )
    client = RegionNetworkEndpointGroupsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "post_list") as post, \
         mock.patch.object(transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListRegionNetworkEndpointGroupsRequest.pb(compute.ListRegionNetworkEndpointGroupsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.NetworkEndpointGroupList.to_json(compute.NetworkEndpointGroupList())

        request = compute.ListRegionNetworkEndpointGroupsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.NetworkEndpointGroupList()

        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionNetworkEndpointGroupsRequest):
    # (continues past this chunk) A 400 from the transport must surface as BadRequest.
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list(request)


def test_list_rest_flattened():
    """Flattened (keyword-argument) calls must transcode to the expected URL."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.NetworkEndpointGroupList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.NetworkEndpointGroupList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values (path params substituted into the URI).
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListRegionNetworkEndpointGroupsRequest(),
            project='project_value',
            region='region_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    """The pager must transparently walk all pages returned by the REST layer."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages: 3 + 0 + 1 + 2 items.
        response = (
            compute.NetworkEndpointGroupList(
                items=[
                    compute.NetworkEndpointGroup(),
                    compute.NetworkEndpointGroup(),
                    compute.NetworkEndpointGroup(),
                ],
                next_page_token='abc',
            ),
            compute.NetworkEndpointGroupList(
                items=[],
                next_page_token='def',
            ),
            compute.NetworkEndpointGroupList(
                items=[
                    compute.NetworkEndpointGroup(),
                ],
                next_page_token='ghi',
            ),
            compute.NetworkEndpointGroupList(
                items=[
                    compute.NetworkEndpointGroup(),
                    compute.NetworkEndpointGroup(),
                ],
            ),
        )
        # Two responses for two calls (the items walk and the pages walk below).
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.NetworkEndpointGroupList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1', 'region': 'sample2'}

        pager = client.list(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.NetworkEndpointGroup)
                   for i in results)

        pages = list(client.list(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token
== token


def test_credentials_transport_error():
    """Conflicting client-construction options must all raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionNetworkEndpointGroupsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionNetworkEndpointGroupsClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RegionNetworkEndpointGroupsClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RegionNetworkEndpointGroupsClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionNetworkEndpointGroupsClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    # A client may be instantiated with a custom transport instance.
    transport = transports.RegionNetworkEndpointGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = RegionNetworkEndpointGroupsClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.RegionNetworkEndpointGroupsRestTransport,
])
def test_transport_adc(transport_class):
    """Default (ADC) credentials are looked up when none are provided."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """`transport.kind` reports the transport name it was created from."""
    transport = RegionNetworkEndpointGroupsClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_region_network_endpoint_groups_base_transport_error():
    """Supplying both credentials and a credentials file is rejected."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.RegionNetworkEndpointGroupsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_region_network_endpoint_groups_base_transport():
    # Instantiate the base transport.
    with mock.patch('google.cloud.compute_v1.services.region_network_endpoint_groups.transports.RegionNetworkEndpointGroupsTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.RegionNetworkEndpointGroupsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'delete',
        'get',
        'insert',
        'list',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_region_network_endpoint_groups_base_transport_with_credentials_file():
    """A credentials file is loaded with the service's default scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_network_endpoint_groups.transports.RegionNetworkEndpointGroupsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RegionNetworkEndpointGroupsTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id="octopus",
        )


def test_region_network_endpoint_groups_base_transport_with_adc():
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_network_endpoint_groups.transports.RegionNetworkEndpointGroupsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RegionNetworkEndpointGroupsTransport()
        adc.assert_called_once()


def test_region_network_endpoint_groups_auth_adc():
    """Constructing a client with no credentials falls back to ADC with default scopes."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        RegionNetworkEndpointGroupsClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id=None,
        )


def test_region_network_endpoint_groups_http_transport_client_cert_source_for_mtls():
    """A client cert source is wired through to the mTLS channel configuration."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.RegionNetworkEndpointGroupsRestTransport(
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_region_network_endpoint_groups_host_no_port(transport_name):
    """An endpoint without a port resolves to the scheme-prefixed REST host."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def
test_region_network_endpoint_groups_host_with_port(transport_name):
    """An endpoint with an explicit port keeps that port on the REST host."""
    client = RegionNetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com:8000'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_region_network_endpoint_groups_client_transport_session_collision(transport_name):
    """Separate clients must not share per-method HTTP sessions."""
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = RegionNetworkEndpointGroupsClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = RegionNetworkEndpointGroupsClient(
        credentials=creds2,
        transport=transport_name,
    )
    session1 = client1.transport.delete._session
    session2 = client2.transport.delete._session
    assert session1 != session2
    session1 = client1.transport.get._session
    session2 = client2.transport.get._session
    assert session1 != session2
    session1 = client1.transport.insert._session
    session2 = client2.transport.insert._session
    assert session1 != session2
    session1 = client1.transport.list._session
    session2 = client2.transport.list._session
    assert session1 != session2

def test_common_billing_account_path():
    """Path helper renders the billingAccounts/{billing_account} template."""
    billing_account = "squid"
    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
    actual = RegionNetworkEndpointGroupsClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    expected = {
        "billing_account": "clam",
    }
    path = RegionNetworkEndpointGroupsClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionNetworkEndpointGroupsClient.parse_common_billing_account_path(path)
    assert expected == actual

def test_common_folder_path():
    """Path helper renders the folders/{folder} template."""
    folder = "whelk"
    expected = "folders/{folder}".format(folder=folder, )
    actual = RegionNetworkEndpointGroupsClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    expected = {
        "folder": "octopus",
    }
    path = RegionNetworkEndpointGroupsClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionNetworkEndpointGroupsClient.parse_common_folder_path(path)
    assert expected == actual

def test_common_organization_path():
    """Path helper renders the organizations/{organization} template."""
    organization = "oyster"
    expected = "organizations/{organization}".format(organization=organization, )
    actual = RegionNetworkEndpointGroupsClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    expected = {
        "organization": "nudibranch",
    }
    path = RegionNetworkEndpointGroupsClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionNetworkEndpointGroupsClient.parse_common_organization_path(path)
    assert expected == actual

def test_common_project_path():
    """Path helper renders the projects/{project} template."""
    project = "cuttlefish"
    expected = "projects/{project}".format(project=project, )
    actual = RegionNetworkEndpointGroupsClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    expected = {
        "project": "mussel",
    }
    path = RegionNetworkEndpointGroupsClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionNetworkEndpointGroupsClient.parse_common_project_path(path)
    assert expected == actual

def test_common_location_path():
    """Path helper renders the projects/{project}/locations/{location} template."""
    project = "winkle"
    location = "nautilus"
    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
    actual = RegionNetworkEndpointGroupsClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = RegionNetworkEndpointGroupsClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionNetworkEndpointGroupsClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    """A supplied ClientInfo is forwarded to the transport's wrapped-message prep."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.RegionNetworkEndpointGroupsTransport, '_prep_wrapped_messages') as prep:
        client = RegionNetworkEndpointGroupsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.RegionNetworkEndpointGroupsTransport, '_prep_wrapped_messages') as prep:
        transport_class = RegionNetworkEndpointGroupsClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    """Closing the client closes the underlying transport session exactly once."""
    # NOTE: this local `transports` dict shadows the imported module for the
    # remainder of the function body (generated code; safe because the module
    # is not referenced again below).
    transports = {
        "rest": "_session",
    }

    for transport, close_name in transports.items():
        client = RegionNetworkEndpointGroupsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()

def test_client_ctx():
    """Using the client as a context manager closes its transport on exit."""
    transports = [
        'rest',
    ]
    for transport in transports:
        client = 
RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionNetworkEndpointGroupsClient, transports.RegionNetworkEndpointGroupsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py new file mode 100644 index 000000000..645cfc727 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py @@ -0,0 +1,8010 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_network_firewall_policies import RegionNetworkFirewallPoliciesClient +from google.cloud.compute_v1.services.region_network_firewall_policies import pagers +from google.cloud.compute_v1.services.region_network_firewall_policies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def 
client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionNetworkFirewallPoliciesClient._get_default_mtls_endpoint(None) is None + assert RegionNetworkFirewallPoliciesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionNetworkFirewallPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionNetworkFirewallPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionNetworkFirewallPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionNetworkFirewallPoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionNetworkFirewallPoliciesClient, "rest"), +]) +def test_region_network_firewall_policies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionNetworkFirewallPoliciesRestTransport, "rest"), +]) +def test_region_network_firewall_policies_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionNetworkFirewallPoliciesClient, "rest"), +]) +def test_region_network_firewall_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_network_firewall_policies_client_get_transport_class(): + transport = 
RegionNetworkFirewallPoliciesClient.get_transport_class() + available_transports = [ + transports.RegionNetworkFirewallPoliciesRestTransport, + ] + assert transport in available_transports + + transport = RegionNetworkFirewallPoliciesClient.get_transport_class("rest") + assert transport == transports.RegionNetworkFirewallPoliciesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionNetworkFirewallPoliciesClient, transports.RegionNetworkFirewallPoliciesRestTransport, "rest"), +]) +@mock.patch.object(RegionNetworkFirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNetworkFirewallPoliciesClient)) +def test_region_network_firewall_policies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionNetworkFirewallPoliciesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionNetworkFirewallPoliciesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionNetworkFirewallPoliciesClient, transports.RegionNetworkFirewallPoliciesRestTransport, "rest", "true"), + (RegionNetworkFirewallPoliciesClient, transports.RegionNetworkFirewallPoliciesRestTransport, "rest", "false"), +]) 
+@mock.patch.object(RegionNetworkFirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNetworkFirewallPoliciesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_network_firewall_policies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionNetworkFirewallPoliciesClient +]) +@mock.patch.object(RegionNetworkFirewallPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNetworkFirewallPoliciesClient)) +def test_region_network_firewall_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionNetworkFirewallPoliciesClient, transports.RegionNetworkFirewallPoliciesRestTransport, "rest"), +]) +def test_region_network_firewall_policies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionNetworkFirewallPoliciesClient, transports.RegionNetworkFirewallPoliciesRestTransport, "rest", None), +]) +def test_region_network_firewall_policies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddAssociationRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_add_association_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_association_rest_required_fields(request_type=compute.AddAssociationRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default 
values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("replace_existing_association", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.add_association(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_add_association_rest_unset_required_fields():
+    transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.add_association._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("replaceExistingAssociation", "requestId", )) & set(("firewallPolicy", "firewallPolicyAssociationResource", "project", "region", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_add_association_rest_interceptors(null_interceptor):
+    transport = transports.RegionNetworkFirewallPoliciesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(),
+        )
+    client = RegionNetworkFirewallPoliciesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_add_association") as post, \
+         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_add_association") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.AddAssociationRegionNetworkFirewallPolicyRequest.pb(compute.AddAssociationRegionNetworkFirewallPolicyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.AddAssociationRegionNetworkFirewallPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.add_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_add_association_rest_bad_request(transport: str = 'rest', request_type=compute.AddAssociationRegionNetworkFirewallPolicyRequest):
+    client = RegionNetworkFirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}
+    request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_association(request) + + +def test_add_association_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/addAssociation" % client.transport._host, args[1]) + + +def test_add_association_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_association( + compute.AddAssociationRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'), + ) + + +def test_add_association_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddAssociationRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_add_association_unary_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_association_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_association_unary_rest_required_fields(request_type=compute.AddAssociationRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("replace_existing_association", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.add_association_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_add_association_unary_rest_unset_required_fields():
+    transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.add_association._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("replaceExistingAssociation", "requestId", )) & set(("firewallPolicy", "firewallPolicyAssociationResource", "project", "region", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_add_association_unary_rest_interceptors(null_interceptor):
+    transport = transports.RegionNetworkFirewallPoliciesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(),
+        )
+    client = RegionNetworkFirewallPoliciesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_add_association") as post, \
+         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_add_association") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.AddAssociationRegionNetworkFirewallPolicyRequest.pb(compute.AddAssociationRegionNetworkFirewallPolicyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.AddAssociationRegionNetworkFirewallPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.add_association_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_add_association_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddAssociationRegionNetworkFirewallPolicyRequest):
+    client = RegionNetworkFirewallPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}
+    request_init["firewall_policy_association_resource"] = {'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.add_association_unary(request)


def test_add_association_unary_rest_flattened():
    """Verify the flattened-args overload builds the expected addAssociation URL."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            firewall_policy='firewall_policy_value',
            firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.add_association_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        # args[1] is the URI positional argument of Session.request(method, url, ...).
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/addAssociation" % client.transport._host, args[1])


def test_add_association_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.add_association_unary(
            compute.AddAssociationRegionNetworkFirewallPolicyRequest(),
            project='project_value',
            region='region_value',
            firewall_policy='firewall_policy_value',
            firewall_policy_association_resource=compute.FirewallPolicyAssociation(attachment_target='attachment_target_value'),
        )


def test_add_association_unary_rest_error():
    # Smoke test: constructing a REST client for this method must not raise.
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.AddRuleRegionNetworkFirewallPolicyRequest,
    dict,
])
def test_add_rule_rest(request_type):
    """add_rule round-trip: request transcodes, Operation response maps to ExtendedOperation."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}
    request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.add_rule(request)

    # Establish that the response is the type that we expect.
    # Non-unary compute LRO methods wrap the Operation in an ExtendedOperation.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_add_rule_rest_required_fields(request_type=compute.AddRuleRegionNetworkFirewallPolicyRequest):
    """Check required-field handling for add_rule: defaults dropped, explicit values kept."""
    transport_class = transports.RegionNetworkFirewallPoliciesRestTransport

    request_init = {}
    request_init["firewall_policy"] = ""
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["firewallPolicy"] = 'firewall_policy_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("max_priority", "min_priority", "request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "firewallPolicy" in jsonified_request
    assert jsonified_request["firewallPolicy"] == 'firewall_policy_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.add_rule(request)

            # No query params expected: all required fields are path params here.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_add_rule_rest_unset_required_fields():
    """_get_unset_required_fields for add_rule = optional query params ∩ required fields."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance,
    # here (generator quirk) — the transport accepts it; confirm against generator.
    transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.add_rule._get_unset_required_fields({})
    assert set(unset_fields) == (set(("maxPriority", "minPriority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_add_rule_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks must each fire exactly once around add_rule."""
    transport = transports.RegionNetworkFirewallPoliciesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(),
    )
    client = RegionNetworkFirewallPoliciesClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_add_rule") as post, \
         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_add_rule") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AddRuleRegionNetworkFirewallPolicyRequest.pb(compute.AddRuleRegionNetworkFirewallPolicyRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.AddRuleRegionNetworkFirewallPolicyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.add_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_add_rule_rest_bad_request(transport: str = 'rest', request_type=compute.AddRuleRegionNetworkFirewallPolicyRequest):
    """A 400 HTTP response must surface as core_exceptions.BadRequest."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}
    request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.add_rule(request)


def test_add_rule_rest_flattened():
    """Verify the flattened-args overload builds the expected addRule URL."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            firewall_policy='firewall_policy_value',
            firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.add_rule(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/addRule" % client.transport._host, args[1])


def test_add_rule_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.add_rule(
            compute.AddRuleRegionNetworkFirewallPolicyRequest(),
            project='project_value',
            region='region_value',
            firewall_policy='firewall_policy_value',
            firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'),
        )


def test_add_rule_rest_error():
    # Smoke test: constructing a REST client for this method must not raise.
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.AddRuleRegionNetworkFirewallPolicyRequest,
    dict,
])
def test_add_rule_unary_rest(request_type):
    """add_rule_unary round-trip: the unary variant returns the raw Operation."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}
    request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.add_rule_unary(request)

    # Establish that the response is the type that we expect.
    # Unlike add_rule, the unary variant is NOT wrapped in ExtendedOperation.
    assert isinstance(response, compute.Operation)


def test_add_rule_unary_rest_required_fields(request_type=compute.AddRuleRegionNetworkFirewallPolicyRequest):
    """Check required-field handling for add_rule_unary (shares the add_rule transport method)."""
    transport_class = transports.RegionNetworkFirewallPoliciesRestTransport

    request_init = {}
    request_init["firewall_policy"] = ""
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["firewallPolicy"] = 'firewall_policy_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("max_priority", "min_priority", "request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "firewallPolicy" in jsonified_request
    assert jsonified_request["firewallPolicy"] == 'firewall_policy_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.add_rule_unary(request)

            # No query params expected: all required fields are path params here.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_add_rule_unary_rest_unset_required_fields():
    """Same set-intersection check as the non-unary variant (shared transport method)."""
    # NOTE(review): AnonymousCredentials passed as a class, not an instance
    # (generator quirk) — works, but confirm against generator output.
    transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.add_rule._get_unset_required_fields({})
    assert set(unset_fields) == (set(("maxPriority", "minPriority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_add_rule_unary_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks must each fire exactly once around add_rule_unary."""
    transport = transports.RegionNetworkFirewallPoliciesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(),
    )
    client = RegionNetworkFirewallPoliciesClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_add_rule") as post, \
         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_add_rule") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.AddRuleRegionNetworkFirewallPolicyRequest.pb(compute.AddRuleRegionNetworkFirewallPolicyRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.AddRuleRegionNetworkFirewallPolicyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.add_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_add_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddRuleRegionNetworkFirewallPolicyRequest):
    """A 400 HTTP response must surface as core_exceptions.BadRequest."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}
    request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.add_rule_unary(request)


def test_add_rule_unary_rest_flattened():
    """Verify the flattened-args overload builds the expected addRule URL."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            firewall_policy='firewall_policy_value',
            firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.add_rule_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/addRule" % client.transport._host, args[1])


def test_add_rule_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.add_rule_unary(
            compute.AddRuleRegionNetworkFirewallPolicyRequest(),
            project='project_value',
            region='region_value',
            firewall_policy='firewall_policy_value',
            firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'),
        )


def test_add_rule_unary_rest_error():
    # Smoke test: constructing a REST client for this method must not raise.
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.CloneRulesRegionNetworkFirewallPolicyRequest,
    dict,
])
def test_clone_rules_rest(request_type):
    """clone_rules round-trip: request transcodes, Operation maps to ExtendedOperation."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.clone_rules(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_clone_rules_rest_required_fields(request_type=compute.CloneRulesRegionNetworkFirewallPolicyRequest):
    """Check required-field handling for clone_rules (no request body for this method)."""
    transport_class = transports.RegionNetworkFirewallPoliciesRestTransport

    request_init = {}
    request_init["firewall_policy"] = ""
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["firewallPolicy"] = 'firewall_policy_value'
    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", "source_firewall_policy", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "firewallPolicy" in jsonified_request
    assert jsonified_request["firewallPolicy"] == 'firewall_policy_value'
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            # Unlike add_rule, no 'body' key here: cloneRules sends no request body.
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.clone_rules(request)

            # No query params expected: all required fields are path params here.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_clone_rules_rest_unset_required_fields():
    """_get_unset_required_fields for clone_rules = optional query params ∩ required fields."""
    # NOTE(review): AnonymousCredentials passed as a class, not an instance
    # (generator quirk) — works, but confirm against generator output.
    transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.clone_rules._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", "sourceFirewallPolicy", )) & set(("firewallPolicy", "project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_clone_rules_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks must each fire exactly once around clone_rules."""
    transport = transports.RegionNetworkFirewallPoliciesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(),
    )
    client = RegionNetworkFirewallPoliciesClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_clone_rules") as post, \
         mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_clone_rules") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.CloneRulesRegionNetworkFirewallPolicyRequest.pb(compute.CloneRulesRegionNetworkFirewallPolicyRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.CloneRulesRegionNetworkFirewallPolicyRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.clone_rules(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_clone_rules_rest_bad_request(transport: str = 'rest', request_type=compute.CloneRulesRegionNetworkFirewallPolicyRequest):
    """A 400 HTTP response must surface as core_exceptions.BadRequest."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.clone_rules(request)


def test_clone_rules_rest_flattened():
    """Verify the flattened-args overload builds the expected cloneRules URL."""
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            firewall_policy='firewall_policy_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.clone_rules(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/cloneRules" % client.transport._host, args[1])


def test_clone_rules_rest_flattened_error(transport: str = 'rest'):
    client = RegionNetworkFirewallPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.clone_rules( + compute.CloneRulesRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_clone_rules_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.CloneRulesRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_clone_rules_unary_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.clone_rules_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_clone_rules_unary_rest_required_fields(request_type=compute.CloneRulesRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).clone_rules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", "source_firewall_policy", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.clone_rules_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_clone_rules_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.clone_rules._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "sourceFirewallPolicy", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_clone_rules_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_clone_rules") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_clone_rules") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CloneRulesRegionNetworkFirewallPolicyRequest.pb(compute.CloneRulesRegionNetworkFirewallPolicyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CloneRulesRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.clone_rules_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_clone_rules_unary_rest_bad_request(transport: str = 'rest', request_type=compute.CloneRulesRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.clone_rules_unary(request) + + +def test_clone_rules_unary_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.clone_rules_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/cloneRules" % client.transport._host, args[1]) + + +def test_clone_rules_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.clone_rules_unary( + compute.CloneRulesRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_clone_rules_unary_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionNetworkFirewallPolicyRequest.pb(compute.DeleteRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_delete_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as 
req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionNetworkFirewallPolicyRequest.pb(compute.DeleteRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicy( + creation_timestamp='creation_timestamp_value', + description='description_value', + display_name='display_name_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + parent='parent_value', + region='region_value', + rule_tuple_count=1737, + self_link='self_link_value', + self_link_with_id='self_link_with_id_value', + short_name='short_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.FirewallPolicy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.display_name == 'display_name_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.parent == 'parent_value' + assert response.region == 'region_value' + assert response.rule_tuple_count == 1737 + assert response.self_link == 'self_link_value' + assert response.self_link_with_id == 'self_link_with_id_value' + assert response.short_name == 'short_name_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + 
pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionNetworkFirewallPolicyRequest.pb(compute.GetRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + 
+ req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicy.to_json(compute.FirewallPolicy()) + + request = compute.GetRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_get_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetAssociationRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_get_association_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyAssociation( + attachment_target='attachment_target_value', + display_name='display_name_value', + firewall_policy_id='firewall_policy_id_value', + name='name_value', + short_name='short_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.FirewallPolicyAssociation) + assert response.attachment_target == 'attachment_target_value' + assert response.display_name == 'display_name_value' + assert response.firewall_policy_id == 'firewall_policy_id_value' + assert response.name == 'name_value' + assert response.short_name == 'short_name_value' + + +def test_get_association_rest_required_fields(request_type=compute.GetAssociationRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("name", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyAssociation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_association(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_association_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_association_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get_association") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetAssociationRegionNetworkFirewallPolicyRequest.pb(compute.GetAssociationRegionNetworkFirewallPolicyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicyAssociation.to_json(compute.FirewallPolicyAssociation()) + + request = compute.GetAssociationRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicyAssociation() + + client.get_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_association_rest_bad_request(transport: str = 'rest', request_type=compute.GetAssociationRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_association(request) + + +def test_get_association_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicyAssociation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/getAssociation" % client.transport._host, args[1]) + + +def test_get_association_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_association( + compute.GetAssociationRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_get_association_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_get_effective_firewalls_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_effective_firewalls(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse) + + +def test_get_effective_firewalls_rest_required_fields(request_type=compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["network"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "network" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_effective_firewalls._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "network" in jsonified_request + assert jsonified_request["network"] == request_init["network"] + + jsonified_request["network"] = 'network_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_effective_firewalls._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("network", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "network" in jsonified_request + assert jsonified_request["network"] == 'network_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_effective_firewalls(request) + + expected_params = [ + ( + "network", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_effective_firewalls_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_effective_firewalls._get_unset_required_fields({}) + assert set(unset_fields) == (set(("network", )) & set(("network", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_effective_firewalls_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get_effective_firewalls") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get_effective_firewalls") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest.pb(compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse.to_json(compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse()) + + request = compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse() + + client.get_effective_firewalls(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_effective_firewalls_rest_bad_request(transport: str = 'rest', request_type=compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_effective_firewalls(request) + + +def test_get_effective_firewalls_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + network='network_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_effective_firewalls(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/getEffectiveFirewalls" % client.transport._host, args[1]) + + +def test_get_effective_firewalls_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_effective_firewalls( + compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + network='network_value', + ) + + +def test_get_effective_firewalls_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "region", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyRegionNetworkFirewallPolicyRequest.pb(compute.GetIamPolicyRegionNetworkFirewallPolicyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRuleRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_get_rule_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyRule( + action='action_value', + description='description_value', + direction='direction_value', + disabled=True, + enable_logging=True, + kind='kind_value', + priority=898, + rule_name='rule_name_value', + rule_tuple_count=1737, + target_resources=['target_resources_value'], + target_service_accounts=['target_service_accounts_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.FirewallPolicyRule) + assert response.action == 'action_value' + assert response.description == 'description_value' + assert response.direction == 'direction_value' + assert response.disabled is True + assert response.enable_logging is True + assert response.kind == 'kind_value' + assert response.priority == 898 + assert response.rule_name == 'rule_name_value' + assert response.rule_tuple_count == 1737 + assert response.target_resources == ['target_resources_value'] + assert response.target_service_accounts == ['target_service_accounts_value'] + + +def test_get_rule_rest_required_fields(request_type=compute.GetRuleRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("priority", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rule_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rule_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get_rule") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRuleRegionNetworkFirewallPolicyRequest.pb(compute.GetRuleRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicyRule.to_json(compute.FirewallPolicyRule()) + + request = compute.GetRuleRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicyRule() + + client.get_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rule_rest_bad_request(transport: str = 'rest', request_type=compute.GetRuleRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rule(request) + + +def test_get_rule_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicyRule() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/getRule" % client.transport._host, args[1]) + + +def test_get_rule_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_rule( + compute.GetRuleRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_get_rule_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 
'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicyResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionNetworkFirewallPolicyRequest.pb(compute.InsertRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 
'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_insert_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 
'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicyResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionNetworkFirewallPolicyRequest.pb(compute.InsertRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': 
[{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_insert_unary_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionNetworkFirewallPoliciesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionNetworkFirewallPoliciesRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.FirewallPolicyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionNetworkFirewallPoliciesRequest.pb(compute.ListRegionNetworkFirewallPoliciesRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.FirewallPolicyList.to_json(compute.FirewallPolicyList()) + + request = compute.ListRegionNetworkFirewallPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.FirewallPolicyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionNetworkFirewallPoliciesRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.FirewallPolicyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.FirewallPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionNetworkFirewallPoliciesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.FirewallPolicyList( + items=[ + compute.FirewallPolicy(), + compute.FirewallPolicy(), + compute.FirewallPolicy(), + ], + next_page_token='abc', + ), + compute.FirewallPolicyList( + items=[], + next_page_token='def', + ), + compute.FirewallPolicyList( + items=[ + compute.FirewallPolicy(), + ], + next_page_token='ghi', + ), + compute.FirewallPolicyList( + items=[ + compute.FirewallPolicy(), + compute.FirewallPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.FirewallPolicyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.FirewallPolicy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_patch_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 
'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "firewallPolicyResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionNetworkFirewallPolicyRequest.pb(compute.PatchRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': 
['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_patch_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': 
['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("firewallPolicy", "firewallPolicyResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionNetworkFirewallPolicyRequest.pb(compute.PatchRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_resource"] = {'associations': [{'attachment_target': 'attachment_target_value', 'display_name': 'display_name_value', 'firewall_policy_id': 'firewall_policy_id_value', 'name': 'name_value', 'short_name': 'short_name_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'display_name': 'display_name_value', 'fingerprint': 'fingerprint_value', 'id': 
205, 'kind': 'kind_value', 'name': 'name_value', 'parent': 'parent_value', 'region': 'region_value', 'rule_tuple_count': 1737, 'rules': [{'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']}], 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'short_name': 'short_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_resource=compute.FirewallPolicy(associations=[compute.FirewallPolicyAssociation(attachment_target='attachment_target_value')]), + ) + + +def test_patch_unary_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRuleRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_patch_rule_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 
'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rule_rest_required_fields(request_type=compute.PatchRuleRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now 
present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rule_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rule_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_patch_rule") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_patch_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.PatchRuleRegionNetworkFirewallPolicyRequest.pb(compute.PatchRuleRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRuleRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rule_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRuleRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': 
['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule(request) + + +def test_patch_rule_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/patchRule" % client.transport._host, args[1]) + + +def test_patch_rule_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_rule( + compute.PatchRuleRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_patch_rule_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRuleRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_patch_rule_unary_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': ['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': 
['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_rule_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_rule_unary_rest_required_fields(request_type=compute.PatchRuleRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rule_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "firewallPolicyRuleResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rule_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = 
RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_patch_rule") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_patch_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRuleRegionNetworkFirewallPolicyRequest.pb(compute.PatchRuleRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRuleRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRuleRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request_init["firewall_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'direction': 'direction_value', 'disabled': True, 'enable_logging': True, 'kind': 'kind_value', 'match': {'dest_address_groups': ['dest_address_groups_value1', 'dest_address_groups_value2'], 'dest_fqdns': 
['dest_fqdns_value1', 'dest_fqdns_value2'], 'dest_ip_ranges': ['dest_ip_ranges_value1', 'dest_ip_ranges_value2'], 'dest_region_codes': ['dest_region_codes_value1', 'dest_region_codes_value2'], 'dest_threat_intelligences': ['dest_threat_intelligences_value1', 'dest_threat_intelligences_value2'], 'layer4_configs': [{'ip_protocol': 'ip_protocol_value', 'ports': ['ports_value1', 'ports_value2']}], 'src_address_groups': ['src_address_groups_value1', 'src_address_groups_value2'], 'src_fqdns': ['src_fqdns_value1', 'src_fqdns_value2'], 'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2'], 'src_region_codes': ['src_region_codes_value1', 'src_region_codes_value2'], 'src_secure_tags': [{'name': 'name_value', 'state': 'state_value'}], 'src_threat_intelligences': ['src_threat_intelligences_value1', 'src_threat_intelligences_value2']}, 'priority': 898, 'rule_name': 'rule_name_value', 'rule_tuple_count': 1737, 'target_resources': ['target_resources_value1', 'target_resources_value2'], 'target_secure_tags': {}, 'target_service_accounts': ['target_service_accounts_value1', 'target_service_accounts_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule_unary(request) + + +def test_patch_rule_unary_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/patchRule" % client.transport._host, args[1]) + + +def test_patch_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_rule_unary( + compute.PatchRuleRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + firewall_policy_rule_resource=compute.FirewallPolicyRule(action='action_value'), + ) + + +def test_patch_rule_unary_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_remove_association_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_association(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_association_rest_required_fields(request_type=compute.RemoveAssociationRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_association(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_association_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", "requestId", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_association_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_remove_association") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_remove_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.RemoveAssociationRegionNetworkFirewallPolicyRequest.pb(compute.RemoveAssociationRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveAssociationRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_association_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveAssociationRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_association(request) + + +def test_remove_association_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/removeAssociation" % client.transport._host, args[1]) + + +def test_remove_association_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_association( + compute.RemoveAssociationRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_remove_association_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_remove_association_unary_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_association_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_association_unary_rest_required_fields(request_type=compute.RemoveAssociationRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("name", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_association_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_association_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name", "requestId", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_association_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_remove_association") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_remove_association") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.RemoveAssociationRegionNetworkFirewallPolicyRequest.pb(compute.RemoveAssociationRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveAssociationRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_association_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_association_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveAssociationRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_association_unary(request) + + +def test_remove_association_unary_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_association_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/removeAssociation" % client.transport._host, args[1]) + + +def test_remove_association_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_association_unary( + compute.RemoveAssociationRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_remove_association_unary_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveRuleRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_remove_rule_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_rule_rest_required_fields(request_type=compute.RemoveRuleRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are 
now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_rule_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_rule_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_remove_rule") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_remove_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveRuleRegionNetworkFirewallPolicyRequest.pb(compute.RemoveRuleRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveRuleRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_rule_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveRuleRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule(request) + + +def test_remove_rule_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/removeRule" % client.transport._host, args[1]) + + +def test_remove_rule_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_rule( + compute.RemoveRuleRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_remove_rule_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveRuleRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_remove_rule_unary_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_rule_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_rule_unary_rest_required_fields(request_type=compute.RemoveRuleRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["firewall_policy"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["firewallPolicy"] = 'firewall_policy_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "firewallPolicy" in jsonified_request + assert jsonified_request["firewallPolicy"] == 'firewall_policy_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_rule_unary_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "requestId", )) & set(("firewallPolicy", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_rule_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_remove_rule") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_remove_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveRuleRegionNetworkFirewallPolicyRequest.pb(compute.RemoveRuleRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveRuleRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveRuleRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule_unary(request) + + +def test_remove_rule_unary_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'firewall_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{firewall_policy}/removeRule" % client.transport._host, args[1]) + + +def test_remove_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_rule_unary( + compute.RemoveRuleRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + firewall_policy='firewall_policy_value', + ) + + +def test_remove_rule_unary_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': 
{}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "regionSetPolicyRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyRegionNetworkFirewallPolicyRequest.pb(compute.SetIamPolicyRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': 
[{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest): + transport_class = transports.RegionNetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.RegionNetworkFirewallPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNetworkFirewallPoliciesRestInterceptor(), + ) + client = 
RegionNetworkFirewallPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest.pb(compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/firewallPolicies/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest(), + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionNetworkFirewallPoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionNetworkFirewallPoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionNetworkFirewallPoliciesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionNetworkFirewallPoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionNetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionNetworkFirewallPoliciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionNetworkFirewallPoliciesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionNetworkFirewallPoliciesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_network_firewall_policies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionNetworkFirewallPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_network_firewall_policies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_network_firewall_policies.transports.RegionNetworkFirewallPoliciesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionNetworkFirewallPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_association', + 'add_rule', + 'clone_rules', + 'delete', + 'get', + 'get_association', + 'get_effective_firewalls', + 'get_iam_policy', + 'get_rule', + 'insert', + 'list', + 'patch', + 'patch_rule', + 'remove_association', + 'remove_rule', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_network_firewall_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_network_firewall_policies.transports.RegionNetworkFirewallPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionNetworkFirewallPoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_network_firewall_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_network_firewall_policies.transports.RegionNetworkFirewallPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionNetworkFirewallPoliciesTransport() + adc.assert_called_once() + + +def test_region_network_firewall_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionNetworkFirewallPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_network_firewall_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionNetworkFirewallPoliciesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_network_firewall_policies_host_no_port(transport_name): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) 
+def test_region_network_firewall_policies_host_with_port(transport_name): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_network_firewall_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionNetworkFirewallPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionNetworkFirewallPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_association._session + session2 = client2.transport.add_association._session + assert session1 != session2 + session1 = client1.transport.add_rule._session + session2 = client2.transport.add_rule._session + assert session1 != session2 + session1 = client1.transport.clone_rules._session + session2 = client2.transport.clone_rules._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_association._session + session2 = client2.transport.get_association._session + assert session1 != session2 + session1 = client1.transport.get_effective_firewalls._session + session2 = client2.transport.get_effective_firewalls._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 
!= session2 + session1 = client1.transport.get_rule._session + session2 = client2.transport.get_rule._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.patch_rule._session + session2 = client2.transport.patch_rule._session + assert session1 != session2 + session1 = client1.transport.remove_association._session + session2 = client2.transport.remove_association._session + assert session1 != session2 + session1 = client1.transport.remove_rule._session + session2 = client2.transport.remove_rule._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionNetworkFirewallPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionNetworkFirewallPoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionNetworkFirewallPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionNetworkFirewallPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionNetworkFirewallPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionNetworkFirewallPoliciesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionNetworkFirewallPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionNetworkFirewallPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionNetworkFirewallPoliciesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionNetworkFirewallPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionNetworkFirewallPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionNetworkFirewallPoliciesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionNetworkFirewallPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionNetworkFirewallPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionNetworkFirewallPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionNetworkFirewallPoliciesTransport, '_prep_wrapped_messages') as prep: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionNetworkFirewallPoliciesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionNetworkFirewallPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + 
client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionNetworkFirewallPoliciesClient, transports.RegionNetworkFirewallPoliciesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py new file mode 100644 index 000000000..df5877d6c --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py @@ -0,0 +1,2507 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_notification_endpoints import RegionNotificationEndpointsClient +from google.cloud.compute_v1.services.region_notification_endpoints import pagers +from google.cloud.compute_v1.services.region_notification_endpoints import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return 
b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionNotificationEndpointsClient._get_default_mtls_endpoint(None) is None + assert RegionNotificationEndpointsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionNotificationEndpointsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionNotificationEndpointsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionNotificationEndpointsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionNotificationEndpointsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionNotificationEndpointsClient, "rest"), +]) +def test_region_notification_endpoints_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in 
['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionNotificationEndpointsRestTransport, "rest"), +]) +def test_region_notification_endpoints_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionNotificationEndpointsClient, "rest"), +]) +def test_region_notification_endpoints_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_notification_endpoints_client_get_transport_class(): + transport = RegionNotificationEndpointsClient.get_transport_class() + available_transports = [ + 
transports.RegionNotificationEndpointsRestTransport, + ] + assert transport in available_transports + + transport = RegionNotificationEndpointsClient.get_transport_class("rest") + assert transport == transports.RegionNotificationEndpointsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionNotificationEndpointsClient, transports.RegionNotificationEndpointsRestTransport, "rest"), +]) +@mock.patch.object(RegionNotificationEndpointsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNotificationEndpointsClient)) +def test_region_notification_endpoints_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionNotificationEndpointsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionNotificationEndpointsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_audience is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionNotificationEndpointsClient, transports.RegionNotificationEndpointsRestTransport, "rest", "true"), + (RegionNotificationEndpointsClient, transports.RegionNotificationEndpointsRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionNotificationEndpointsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNotificationEndpointsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_notification_endpoints_client_mtls_env_auto(client_class, 
transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionNotificationEndpointsClient +]) +@mock.patch.object(RegionNotificationEndpointsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionNotificationEndpointsClient)) +def test_region_notification_endpoints_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionNotificationEndpointsClient, transports.RegionNotificationEndpointsRestTransport, "rest"), +]) +def test_region_notification_endpoints_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionNotificationEndpointsClient, transports.RegionNotificationEndpointsRestTransport, "rest", None), +]) +def test_region_notification_endpoints_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionNotificationEndpointRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionNotificationEndpointRequest): + transport_class = transports.RegionNotificationEndpointsRestTransport + + request_init = {} + request_init["notification_endpoint"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + 
+ jsonified_request["notificationEndpoint"] = 'notification_endpoint_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "notificationEndpoint" in jsonified_request + assert jsonified_request["notificationEndpoint"] == 'notification_endpoint_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionNotificationEndpointsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("notificationEndpoint", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNotificationEndpointsRestInterceptor(), + ) + client = RegionNotificationEndpointsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionNotificationEndpointRequest.pb(compute.DeleteRegionNotificationEndpointRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionNotificationEndpointRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionNotificationEndpointRequest): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + notification_endpoint='notification_endpoint_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionNotificationEndpointRequest(), + project='project_value', + region='region_value', + notification_endpoint='notification_endpoint_value', + ) + + +def test_delete_rest_error(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionNotificationEndpointRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionNotificationEndpointRequest): + transport_class = transports.RegionNotificationEndpointsRestTransport + + request_init = {} + request_init["notification_endpoint"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["notificationEndpoint"] = 'notification_endpoint_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "notificationEndpoint" in jsonified_request + assert jsonified_request["notificationEndpoint"] == 'notification_endpoint_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionNotificationEndpointsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("notificationEndpoint", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNotificationEndpointsRestInterceptor(), + ) + client = RegionNotificationEndpointsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as 
req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionNotificationEndpointRequest.pb(compute.DeleteRegionNotificationEndpointRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionNotificationEndpointRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionNotificationEndpointRequest): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + notification_endpoint='notification_endpoint_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionNotificationEndpointRequest(), + project='project_value', + region='region_value', + notification_endpoint='notification_endpoint_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionNotificationEndpointRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NotificationEndpoint( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NotificationEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.NotificationEndpoint) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionNotificationEndpointRequest): + transport_class = transports.RegionNotificationEndpointsRestTransport + + request_init = {} + request_init["notification_endpoint"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["notificationEndpoint"] = 'notification_endpoint_value' + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "notificationEndpoint" in jsonified_request + assert jsonified_request["notificationEndpoint"] == 'notification_endpoint_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NotificationEndpoint() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NotificationEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionNotificationEndpointsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("notificationEndpoint", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNotificationEndpointsRestInterceptor(), + ) + client = RegionNotificationEndpointsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionNotificationEndpointRequest.pb(compute.GetRegionNotificationEndpointRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NotificationEndpoint.to_json(compute.NotificationEndpoint()) + + request = compute.GetRegionNotificationEndpointRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NotificationEndpoint() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionNotificationEndpointRequest): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NotificationEndpoint() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'notification_endpoint': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + notification_endpoint='notification_endpoint_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NotificationEndpoint.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionNotificationEndpointRequest(), + project='project_value', + region='region_value', + notification_endpoint='notification_endpoint_value', + ) + + +def test_get_rest_error(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionNotificationEndpointRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["notification_endpoint_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_settings': {'authority': 'authority_value', 'endpoint': 'endpoint_value', 'payload_name': 'payload_name_value', 'resend_interval': {'nanos': 543, 'seconds': 751}, 'retry_duration_sec': 1941}, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionNotificationEndpointRequest): + transport_class = transports.RegionNotificationEndpointsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionNotificationEndpointsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("notificationEndpointResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNotificationEndpointsRestInterceptor(), + ) + client = RegionNotificationEndpointsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionNotificationEndpointRequest.pb(compute.InsertRegionNotificationEndpointRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionNotificationEndpointRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionNotificationEndpointRequest): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["notification_endpoint_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_settings': {'authority': 'authority_value', 'endpoint': 'endpoint_value', 'payload_name': 'payload_name_value', 'resend_interval': {'nanos': 543, 'seconds': 751}, 'retry_duration_sec': 1941}, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + notification_endpoint_resource=compute.NotificationEndpoint(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertRegionNotificationEndpointRequest(), + project='project_value', + region='region_value', + notification_endpoint_resource=compute.NotificationEndpoint(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionNotificationEndpointRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["notification_endpoint_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_settings': {'authority': 'authority_value', 'endpoint': 'endpoint_value', 'payload_name': 'payload_name_value', 'resend_interval': {'nanos': 543, 'seconds': 751}, 'retry_duration_sec': 1941}, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionNotificationEndpointRequest): + transport_class = transports.RegionNotificationEndpointsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionNotificationEndpointsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("notificationEndpointResource", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNotificationEndpointsRestInterceptor(), + ) + client = RegionNotificationEndpointsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionNotificationEndpointRequest.pb(compute.InsertRegionNotificationEndpointRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionNotificationEndpointRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionNotificationEndpointRequest): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["notification_endpoint_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'grpc_settings': {'authority': 'authority_value', 'endpoint': 'endpoint_value', 'payload_name': 'payload_name_value', 'resend_interval': {'nanos': 543, 'seconds': 751}, 'retry_duration_sec': 1941}, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + notification_endpoint_resource=compute.NotificationEndpoint(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionNotificationEndpointRequest(), + project='project_value', + region='region_value', + notification_endpoint_resource=compute.NotificationEndpoint(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionNotificationEndpointsRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NotificationEndpointList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NotificationEndpointList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionNotificationEndpointsRequest): + transport_class = transports.RegionNotificationEndpointsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NotificationEndpointList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.NotificationEndpointList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionNotificationEndpointsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionNotificationEndpointsRestInterceptor(), + ) + client = RegionNotificationEndpointsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionNotificationEndpointsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionNotificationEndpointsRequest.pb(compute.ListRegionNotificationEndpointsRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.NotificationEndpointList.to_json(compute.NotificationEndpointList()) + + request = compute.ListRegionNotificationEndpointsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NotificationEndpointList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionNotificationEndpointsRequest): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.NotificationEndpointList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.NotificationEndpointList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionNotificationEndpointsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NotificationEndpointList( + items=[ + compute.NotificationEndpoint(), + compute.NotificationEndpoint(), + compute.NotificationEndpoint(), + ], + next_page_token='abc', + ), + compute.NotificationEndpointList( + items=[], + next_page_token='def', + ), + compute.NotificationEndpointList( + items=[ + compute.NotificationEndpoint(), + ], + next_page_token='ghi', + ), + compute.NotificationEndpointList( + items=[ + compute.NotificationEndpoint(), + compute.NotificationEndpoint(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.NotificationEndpointList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.NotificationEndpoint) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionNotificationEndpointsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionNotificationEndpointsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionNotificationEndpointsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionNotificationEndpointsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionNotificationEndpointsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionNotificationEndpointsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionNotificationEndpointsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionNotificationEndpointsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_notification_endpoints_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionNotificationEndpointsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_notification_endpoints_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_notification_endpoints.transports.RegionNotificationEndpointsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionNotificationEndpointsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_notification_endpoints_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_notification_endpoints.transports.RegionNotificationEndpointsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionNotificationEndpointsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_notification_endpoints_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_notification_endpoints.transports.RegionNotificationEndpointsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionNotificationEndpointsTransport() + adc.assert_called_once() + + +def test_region_notification_endpoints_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionNotificationEndpointsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_notification_endpoints_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionNotificationEndpointsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_notification_endpoints_host_no_port(transport_name): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_notification_endpoints_host_with_port(transport_name): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_region_notification_endpoints_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionNotificationEndpointsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionNotificationEndpointsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionNotificationEndpointsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionNotificationEndpointsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionNotificationEndpointsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionNotificationEndpointsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionNotificationEndpointsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionNotificationEndpointsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionNotificationEndpointsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionNotificationEndpointsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionNotificationEndpointsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionNotificationEndpointsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionNotificationEndpointsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionNotificationEndpointsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionNotificationEndpointsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionNotificationEndpointsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionNotificationEndpointsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionNotificationEndpointsTransport, '_prep_wrapped_messages') as prep: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionNotificationEndpointsTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionNotificationEndpointsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionNotificationEndpointsClient, transports.RegionNotificationEndpointsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_operations.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_operations.py new file mode 100644 index 000000000..e0b9aec0c --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_operations.py @@ -0,0 +1,1957 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+# NOTE(review): duplicate of the json_format import above — harmless, left as
+# emitted by the code generator (this file is generated; do not hand-edit).
+from google.protobuf import json_format
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.compute_v1.services.region_operations import RegionOperationsClient
+from google.cloud.compute_v1.services.region_operations import pagers
+from google.cloud.compute_v1.services.region_operations import transports
+from google.cloud.compute_v1.types import compute
+from google.oauth2 import service_account
+import google.auth
+
+
+# Stand-in client certificate source for the mTLS tests below: returns a
+# static (cert bytes, key bytes) pair so no real certificate is needed.
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionOperationsClient._get_default_mtls_endpoint(None) is None + assert RegionOperationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionOperationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionOperationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionOperationsClient, "rest"), +]) +def test_region_operations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionOperationsRestTransport, "rest"), +]) +def test_region_operations_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionOperationsClient, "rest"), +]) +def test_region_operations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_operations_client_get_transport_class(): + transport = RegionOperationsClient.get_transport_class() + available_transports = [ + transports.RegionOperationsRestTransport, + ] + assert transport in available_transports + + transport = RegionOperationsClient.get_transport_class("rest") + assert transport == transports.RegionOperationsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionOperationsClient, transports.RegionOperationsRestTransport, "rest"), +]) 
+@mock.patch.object(RegionOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionOperationsClient)) +def test_region_operations_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionOperationsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionOperationsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionOperationsClient, transports.RegionOperationsRestTransport, "rest", "true"), + (RegionOperationsClient, transports.RegionOperationsRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionOperationsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_operations_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionOperationsClient +]) +@mock.patch.object(RegionOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionOperationsClient)) +def test_region_operations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionOperationsClient, transports.RegionOperationsRestTransport, "rest"), +]) +def test_region_operations_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionOperationsClient, transports.RegionOperationsRestTransport, "rest", None), +]) +def test_region_operations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionOperationRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DeleteRegionOperationResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DeleteRegionOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.DeleteRegionOperationResponse) + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionOperationRequest): + transport_class = transports.RegionOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] 
== 'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DeleteRegionOperationResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DeleteRegionOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("operation", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) 
+def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionOperationsRestInterceptor(), + ) + client = RegionOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionOperationsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionOperationsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionOperationRequest.pb(compute.DeleteRegionOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.DeleteRegionOperationResponse.to_json(compute.DeleteRegionOperationResponse()) + + request = compute.DeleteRegionOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DeleteRegionOperationResponse() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionOperationRequest): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DeleteRegionOperationResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DeleteRegionOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/operations/{operation}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionOperationRequest(), + project='project_value', + region='region_value', + operation='operation_value', + ) + + +def test_delete_rest_error(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionOperationRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionOperationRequest): + transport_class = transports.RegionOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("operation", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionOperationsRestInterceptor(), + ) + client = RegionOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionOperationsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionOperationsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionOperationRequest.pb(compute.GetRegionOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.GetRegionOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionOperationRequest): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/operations/{operation}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionOperationRequest(), + project='project_value', + region='region_value', + operation='operation_value', + ) + + +def test_get_rest_error(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionOperationsRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.OperationList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionOperationsRequest): + transport_class = transports.RegionOperationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.OperationList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionOperationsRestInterceptor(), + ) + client = RegionOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionOperationsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionOperationsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionOperationsRequest.pb(compute.ListRegionOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.OperationList.to_json(compute.OperationList()) + + request = compute.ListRegionOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.OperationList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionOperationsRequest): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.OperationList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/operations" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListRegionOperationsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.OperationList( + items=[ + compute.Operation(), + compute.Operation(), + compute.Operation(), + ], + next_page_token='abc', + ), + compute.OperationList( + items=[], + next_page_token='def', + ), + compute.OperationList( + items=[ + compute.Operation(), + ], + next_page_token='ghi', + ), + compute.OperationList( + items=[ + compute.Operation(), + compute.Operation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.OperationList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Operation) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.WaitRegionOperationRequest, + dict, +]) +def test_wait_rest(request_type): + client = 
RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.wait(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_wait_rest_required_fields(request_type=compute.WaitRegionOperationRequest): + transport_class = transports.RegionOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).wait._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).wait._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.wait(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_wait_rest_unset_required_fields(): + transport = transports.RegionOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.wait._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("operation", "project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_wait_rest_interceptors(null_interceptor): + transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionOperationsRestInterceptor(), + ) + client = RegionOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionOperationsRestInterceptor, "post_wait") as post, \ + mock.patch.object(transports.RegionOperationsRestInterceptor, "pre_wait") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.WaitRegionOperationRequest.pb(compute.WaitRegionOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.WaitRegionOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.wait(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_wait_rest_bad_request(transport: str = 'rest', request_type=compute.WaitRegionOperationRequest): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait(request) + + +def test_wait_rest_flattened(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'operation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.wait(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/operations/{operation}/wait" % client.transport._host, args[1]) + + +def test_wait_rest_flattened_error(transport: str = 'rest'): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.wait( + compute.WaitRegionOperationRequest(), + project='project_value', + region='region_value', + operation='operation_value', + ) + + +def test_wait_rest_error(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionOperationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionOperationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionOperationsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionOperationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RegionOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionOperationsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionOperationsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionOperationsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_operations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionOperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_operations_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_operations.transports.RegionOperationsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionOperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'list', + 'wait', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_operations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_operations.transports.RegionOperationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionOperationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_operations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_operations.transports.RegionOperationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionOperationsTransport() + adc.assert_called_once() + + +def test_region_operations_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionOperationsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_operations_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionOperationsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_operations_host_no_port(transport_name): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_operations_host_with_port(transport_name): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_operations_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() 
+ creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionOperationsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionOperationsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.wait._session + session2 = client2.transport.wait._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionOperationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionOperationsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionOperationsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionOperationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionOperationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionOperationsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionOperationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionOperationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionOperationsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionOperationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionOperationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionOperationsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionOperationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionOperationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionOperationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionOperationsTransport, '_prep_wrapped_messages') as prep: + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionOperationsTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionOperationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionOperationsClient, transports.RegionOperationsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_security_policies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_security_policies.py new file mode 100644 index 000000000..886a5e31d --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_security_policies.py @@ -0,0 +1,3079 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_security_policies import RegionSecurityPoliciesClient +from google.cloud.compute_v1.services.region_security_policies import pagers +from google.cloud.compute_v1.services.region_security_policies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionSecurityPoliciesClient._get_default_mtls_endpoint(None) is None + assert RegionSecurityPoliciesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionSecurityPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionSecurityPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionSecurityPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionSecurityPoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionSecurityPoliciesClient, "rest"), +]) +def test_region_security_policies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionSecurityPoliciesRestTransport, "rest"), +]) +def test_region_security_policies_client_service_account_always_use_jwt(transport_class, 
transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionSecurityPoliciesClient, "rest"), +]) +def test_region_security_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_security_policies_client_get_transport_class(): + transport = RegionSecurityPoliciesClient.get_transport_class() + available_transports = [ + transports.RegionSecurityPoliciesRestTransport, + ] + assert transport in available_transports + + transport = RegionSecurityPoliciesClient.get_transport_class("rest") + assert transport == transports.RegionSecurityPoliciesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + 
(RegionSecurityPoliciesClient, transports.RegionSecurityPoliciesRestTransport, "rest"), +]) +@mock.patch.object(RegionSecurityPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSecurityPoliciesClient)) +def test_region_security_policies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionSecurityPoliciesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionSecurityPoliciesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionSecurityPoliciesClient, transports.RegionSecurityPoliciesRestTransport, "rest", "true"), + (RegionSecurityPoliciesClient, transports.RegionSecurityPoliciesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionSecurityPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSecurityPoliciesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_security_policies_client_mtls_env_auto(client_class, transport_class, transport_name, 
use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionSecurityPoliciesClient +]) +@mock.patch.object(RegionSecurityPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSecurityPoliciesClient)) +def test_region_security_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionSecurityPoliciesClient, transports.RegionSecurityPoliciesRestTransport, "rest"), +]) +def test_region_security_policies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionSecurityPoliciesClient, transports.RegionSecurityPoliciesRestTransport, "rest", None), +]) +def test_region_security_policies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionSecurityPolicyRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionSecurityPolicyRequest): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_rest_unset_required_fields():
+    transport = transports.RegionSecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "securityPolicy", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_rest_interceptors(null_interceptor):
+    transport = transports.RegionSecurityPoliciesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionSecurityPoliciesRestInterceptor(),
+        )
+    client = RegionSecurityPoliciesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "post_delete") as post, \
+         mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "pre_delete") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteRegionSecurityPolicyRequest.pb(compute.DeleteRegionSecurityPolicyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.DeleteRegionSecurityPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionSecurityPolicyRequest):
+    client = RegionSecurityPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete(request)
+
+
+def test_delete_rest_flattened():
+    client = RegionSecurityPoliciesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionSecurityPolicyRequest(), + project='project_value', + region='region_value', + security_policy='security_policy_value', + ) + + +def test_delete_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionSecurityPolicyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionSecurityPolicyRequest): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "securityPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionSecurityPolicyRequest.pb(compute.DeleteRegionSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionSecurityPolicyRequest): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionSecurityPolicyRequest(), + project='project_value', + region='region_value', + security_policy='security_policy_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionSecurityPolicyRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SecurityPolicy( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + name='name_value', + region='region_value', + self_link='self_link_value', + type_='type__value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.SecurityPolicy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.type_ == 'type__value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRequest): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "securityPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionSecurityPolicyRequest.pb(compute.GetRegionSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPolicy.to_json(compute.SecurityPolicy()) + + request = compute.GetRegionSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPolicy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionSecurityPolicyRequest): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SecurityPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionSecurityPolicyRequest(), + project='project_value', + region='region_value', + security_policy='security_policy_value', + ) + + +def test_get_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionSecurityPolicyRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': 
{'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionSecurityPolicyRequest): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("project", "region", "securityPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionSecurityPolicyRequest.pb(compute.InsertRegionSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionSecurityPolicyRequest): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 
'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionSecurityPolicyRequest(), + project='project_value', + region='region_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + + +def test_insert_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionSecurityPolicyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 
'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionSecurityPolicyRequest): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("project", "region", "securityPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionSecurityPolicyRequest.pb(compute.InsertRegionSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionSecurityPolicyRequest): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 
'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionSecurityPolicyRequest(), + project='project_value', + region='region_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + + +def test_insert_unary_rest_error(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionSecurityPoliciesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SecurityPolicyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionSecurityPoliciesRequest): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionSecurityPoliciesRequest.pb(compute.ListRegionSecurityPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPolicyList.to_json(compute.SecurityPolicyList()) + + request = compute.ListRegionSecurityPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPolicyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionSecurityPoliciesRequest): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SecurityPolicyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/securityPolicies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionSecurityPoliciesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + compute.SecurityPolicy(), + compute.SecurityPolicy(), + ], + next_page_token='abc', + ), + compute.SecurityPolicyList( + items=[], + next_page_token='def', + ), + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + ], + next_page_token='ghi', + ), + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + compute.SecurityPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SecurityPolicyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.SecurityPolicy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionSecurityPolicyRequest, + dict, +]) +def test_patch_rest(request_type): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': 
{'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.patch(request)

    # Establish that the response is the type that we expect.
    # patch() returns an extended (LRO-style) operation wrapper, not a raw Operation.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_patch_rest_required_fields(request_type=compute.PatchRegionSecurityPolicyRequest):
    """Verify required-field handling for patch: defaults are dropped, explicit values kept."""
    transport_class = transports.RegionSecurityPoliciesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request_init["security_policy"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "securityPolicy", "securityPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionSecurityPolicyRequest.pb(compute.PatchRegionSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.PatchRegionSecurityPolicyRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        # With or without a configured interceptor, both hooks fire exactly once.
        pre.assert_called_once()
        post.assert_called_once()


def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionSecurityPolicyRequest):
    """A 400 response from the session surfaces to callers as core_exceptions.BadRequest."""
    client = RegionSecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'}
    request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.patch(request)


def test_patch_rest_flattened():
    """The flattened-argument form of patch transcodes to the expected URI."""
    client = RegionSecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            security_policy='security_policy_value',
            security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.patch(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" % client.transport._host, args[1])


def test_patch_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields raises ValueError."""
    client = RegionSecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.patch(
            compute.PatchRegionSecurityPolicyRequest(),
            project='project_value',
            region='region_value',
            security_policy='security_policy_value',
            security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))),
        )


def test_patch_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = RegionSecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.PatchRegionSecurityPolicyRequest,
    dict,
])
def test_patch_unary_rest(request_type):
    """patch_unary returns a plain compute.Operation for both proto and dict request forms."""
    client = RegionSecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'}
    request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.patch_unary(request)

    # Establish that the response is the type that we expect.
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionSecurityPolicyRequest): + transport_class = transports.RegionSecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionSecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "securityPolicy", "securityPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSecurityPoliciesRestInterceptor(), + ) + client = RegionSecurityPoliciesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionSecurityPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionSecurityPolicyRequest.pb(compute.PatchRegionSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionSecurityPolicyRequest): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 
'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'security_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + security_policy='security_policy_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/securityPolicies/{security_policy}" % client.transport._host, args[1])


def test_patch_unary_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields raises ValueError."""
    client = RegionSecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.patch_unary(
            compute.PatchRegionSecurityPolicyRequest(),
            project='project_value',
            region='region_value',
            security_policy='security_policy_value',
            security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))),
        )


def test_patch_unary_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = RegionSecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


def test_credentials_transport_error():
    """Mutually exclusive client options (credentials / transport / api_key / scopes) raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.RegionSecurityPoliciesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionSecurityPoliciesClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.RegionSecurityPoliciesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionSecurityPoliciesClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.RegionSecurityPoliciesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RegionSecurityPoliciesClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RegionSecurityPoliciesClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.RegionSecurityPoliciesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionSecurityPoliciesClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    """A client may be instantiated with a custom transport instance."""
    transport = transports.RegionSecurityPoliciesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = RegionSecurityPoliciesClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.RegionSecurityPoliciesRestTransport,
])
def test_transport_adc(transport_class):
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """transport.kind reflects the transport name it was created from."""
    transport = RegionSecurityPoliciesClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_region_security_policies_base_transport_error():
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.RegionSecurityPoliciesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_region_security_policies_base_transport():
    """Every abstract method/property on the base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.compute_v1.services.region_security_policies.transports.RegionSecurityPoliciesTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.RegionSecurityPoliciesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'delete',
        'get',
        'insert',
        'list',
        'patch',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_region_security_policies_base_transport_with_credentials_file():
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_security_policies.transports.RegionSecurityPoliciesTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RegionSecurityPoliciesTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        # The file is loaded with the service's default compute/cloud-platform scopes.
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id="octopus",
        )


def test_region_security_policies_base_transport_with_adc():
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_security_policies.transports.RegionSecurityPoliciesTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RegionSecurityPoliciesTransport()
        adc.assert_called_once()


def test_region_security_policies_auth_adc():
    # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionSecurityPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_security_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionSecurityPoliciesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_security_policies_host_no_port(transport_name): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_security_policies_host_with_port(transport_name): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_region_security_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionSecurityPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionSecurityPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionSecurityPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionSecurityPoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSecurityPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionSecurityPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionSecurityPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSecurityPoliciesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionSecurityPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionSecurityPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSecurityPoliciesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionSecurityPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionSecurityPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSecurityPoliciesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionSecurityPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionSecurityPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSecurityPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionSecurityPoliciesTransport, '_prep_wrapped_messages') as prep: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionSecurityPoliciesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionSecurityPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionSecurityPoliciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionSecurityPoliciesClient, transports.RegionSecurityPoliciesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py new file mode 100644 index 000000000..ddc576a86 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py @@ -0,0 +1,2517 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_ssl_certificates import RegionSslCertificatesClient +from google.cloud.compute_v1.services.region_ssl_certificates import pagers +from google.cloud.compute_v1.services.region_ssl_certificates import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key 
bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionSslCertificatesClient._get_default_mtls_endpoint(None) is None + assert RegionSslCertificatesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionSslCertificatesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionSslCertificatesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionSslCertificatesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionSslCertificatesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionSslCertificatesClient, "rest"), +]) +def test_region_ssl_certificates_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + 
+@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionSslCertificatesRestTransport, "rest"), +]) +def test_region_ssl_certificates_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionSslCertificatesClient, "rest"), +]) +def test_region_ssl_certificates_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_ssl_certificates_client_get_transport_class(): + transport = RegionSslCertificatesClient.get_transport_class() + available_transports = [ + transports.RegionSslCertificatesRestTransport, + ] + assert transport in available_transports + + transport = 
RegionSslCertificatesClient.get_transport_class("rest") + assert transport == transports.RegionSslCertificatesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionSslCertificatesClient, transports.RegionSslCertificatesRestTransport, "rest"), +]) +@mock.patch.object(RegionSslCertificatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSslCertificatesClient)) +def test_region_ssl_certificates_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionSslCertificatesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionSslCertificatesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionSslCertificatesClient, transports.RegionSslCertificatesRestTransport, "rest", "true"), + (RegionSslCertificatesClient, transports.RegionSslCertificatesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionSslCertificatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSslCertificatesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_ssl_certificates_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): 
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionSslCertificatesClient +]) +@mock.patch.object(RegionSslCertificatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSslCertificatesClient)) +def test_region_ssl_certificates_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionSslCertificatesClient, transports.RegionSslCertificatesRestTransport, "rest"), +]) +def test_region_ssl_certificates_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionSslCertificatesClient, transports.RegionSslCertificatesRestTransport, "rest", None), +]) +def test_region_ssl_certificates_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionSslCertificateRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionSslCertificateRequest): + transport_class = transports.RegionSslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["ssl_certificate"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["sslCertificate"] = 'ssl_certificate_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "sslCertificate" in jsonified_request + assert jsonified_request["sslCertificate"] == 'ssl_certificate_value' + + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionSslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslCertificate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionSslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslCertificatesRestInterceptor(), + ) + client = RegionSslCertificatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionSslCertificateRequest.pb(compute.DeleteRegionSslCertificateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionSslCertificateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionSslCertificateRequest): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_certificate='ssl_certificate_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionSslCertificateRequest(), + project='project_value', + region='region_value', + ssl_certificate='ssl_certificate_value', + ) + + +def test_delete_rest_error(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionSslCertificateRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionSslCertificateRequest): + transport_class = transports.RegionSslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["ssl_certificate"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["sslCertificate"] = 'ssl_certificate_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "sslCertificate" in jsonified_request + assert jsonified_request["sslCertificate"] == 'ssl_certificate_value' + + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionSslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslCertificate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslCertificatesRestInterceptor(), + ) + client = RegionSslCertificatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionSslCertificateRequest.pb(compute.DeleteRegionSslCertificateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionSslCertificateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionSslCertificateRequest): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_certificate='ssl_certificate_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionSslCertificateRequest(), + project='project_value', + region='region_value', + ssl_certificate='ssl_certificate_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionSslCertificateRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslCertificate( + certificate='certificate_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + expire_time='expire_time_value', + id=205, + kind='kind_value', + name='name_value', + private_key='private_key_value', + region='region_value', + self_link='self_link_value', + subject_alternative_names=['subject_alternative_names_value'], + type_='type__value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.SslCertificate) + assert response.certificate == 'certificate_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.expire_time == 'expire_time_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.private_key == 'private_key_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.subject_alternative_names == ['subject_alternative_names_value'] + assert response.type_ == 'type__value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionSslCertificateRequest): + transport_class = transports.RegionSslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["ssl_certificate"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + 
use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["sslCertificate"] = 'ssl_certificate_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "sslCertificate" in jsonified_request + assert jsonified_request["sslCertificate"] == 'ssl_certificate_value' + + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionSslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "sslCertificate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionSslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslCertificatesRestInterceptor(), + ) + client = RegionSslCertificatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionSslCertificateRequest.pb(compute.GetRegionSslCertificateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code 
= 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslCertificate.to_json(compute.SslCertificate()) + + request = compute.GetRegionSslCertificateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslCertificate() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionSslCertificateRequest): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslCertificate() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'ssl_certificate': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_certificate='ssl_certificate_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionSslCertificateRequest(), + project='project_value', + region='region_value', + ssl_certificate='ssl_certificate_value', + ) + + +def test_get_rest_error(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionSslCertificateRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["ssl_certificate_resource"] = {'certificate': 'certificate_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'expire_time': 'expire_time_value', 'id': 205, 'kind': 'kind_value', 'managed': {'domain_status': {}, 'domains': ['domains_value1', 'domains_value2'], 'status': 'status_value'}, 'name': 'name_value', 'private_key': 'private_key_value', 'region': 'region_value', 'self_link': 'self_link_value', 'self_managed': {'certificate': 'certificate_value', 'private_key': 'private_key_value'}, 'subject_alternative_names': ['subject_alternative_names_value1', 'subject_alternative_names_value2'], 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionSslCertificateRequest): + transport_class = transports.RegionSslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.insert(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_insert_rest_unset_required_fields():
+    transport = transports.RegionSslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.insert._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslCertificateResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_insert_rest_interceptors(null_interceptor):
+    transport = transports.RegionSslCertificatesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionSslCertificatesRestInterceptor(),
+        )
+    client = RegionSslCertificatesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "post_insert") as post, \
+         mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "pre_insert") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.InsertRegionSslCertificateRequest.pb(compute.InsertRegionSslCertificateRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.InsertRegionSslCertificateRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionSslCertificateRequest):
+    client = RegionSslCertificatesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2'}
+    request_init["ssl_certificate_resource"] = {'certificate': 'certificate_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'expire_time': 'expire_time_value', 'id': 205, 'kind': 'kind_value', 'managed': {'domain_status': {}, 'domains': ['domains_value1', 'domains_value2'], 'status': 'status_value'}, 'name': 'name_value', 'private_key': 'private_key_value', 'region': 'region_value', 'self_link': 'self_link_value', 'self_managed': {'certificate': 'certificate_value', 'private_key': 'private_key_value'}, 'subject_alternative_names': ['subject_alternative_names_value1', 'subject_alternative_names_value2'], 'type_': 'type__value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_certificate_resource=compute.SslCertificate(certificate='certificate_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslCertificates" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertRegionSslCertificateRequest(), + project='project_value', + region='region_value', + ssl_certificate_resource=compute.SslCertificate(certificate='certificate_value'), + ) + + +def test_insert_rest_error(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionSslCertificateRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["ssl_certificate_resource"] = {'certificate': 'certificate_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'expire_time': 'expire_time_value', 'id': 205, 'kind': 'kind_value', 'managed': {'domain_status': {}, 'domains': ['domains_value1', 'domains_value2'], 'status': 'status_value'}, 'name': 'name_value', 'private_key': 'private_key_value', 'region': 'region_value', 'self_link': 'self_link_value', 'self_managed': {'certificate': 'certificate_value', 'private_key': 'private_key_value'}, 'subject_alternative_names': ['subject_alternative_names_value1', 'subject_alternative_names_value2'], 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionSslCertificateRequest): + transport_class = transports.RegionSslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.insert_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_insert_unary_rest_unset_required_fields():
+    transport = transports.RegionSslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.insert._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslCertificateResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_insert_unary_rest_interceptors(null_interceptor):
+    transport = transports.RegionSslCertificatesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionSslCertificatesRestInterceptor(),
+        )
+    client = RegionSslCertificatesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionSslCertificateRequest.pb(compute.InsertRegionSslCertificateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionSslCertificateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionSslCertificateRequest): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["ssl_certificate_resource"] = {'certificate': 'certificate_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'expire_time': 'expire_time_value', 'id': 205, 'kind': 'kind_value', 'managed': {'domain_status': {}, 'domains': ['domains_value1', 'domains_value2'], 'status': 'status_value'}, 'name': 'name_value', 'private_key': 'private_key_value', 'region': 'region_value', 'self_link': 'self_link_value', 'self_managed': {'certificate': 'certificate_value', 'private_key': 'private_key_value'}, 'subject_alternative_names': ['subject_alternative_names_value1', 
'subject_alternative_names_value2'], 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_certificate_resource=compute.SslCertificate(certificate='certificate_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslCertificates" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionSslCertificateRequest(), + project='project_value', + region='region_value', + ssl_certificate_resource=compute.SslCertificate(certificate='certificate_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionSslCertificatesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslCertificateList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionSslCertificatesRequest): + transport_class = transports.RegionSslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificateList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.SslCertificateList.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_rest_unset_required_fields():
+    transport = transports.RegionSslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_rest_interceptors(null_interceptor):
+    transport = transports.RegionSslCertificatesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RegionSslCertificatesRestInterceptor(),
+        )
+    client = RegionSslCertificatesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "post_list") as post, \
+         mock.patch.object(transports.RegionSslCertificatesRestInterceptor, "pre_list") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.ListRegionSslCertificatesRequest.pb(compute.ListRegionSslCertificatesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+ } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslCertificateList.to_json(compute.SslCertificateList()) + + request = compute.ListRegionSslCertificatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslCertificateList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionSslCertificatesRequest): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslCertificateList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslCertificates" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionSslCertificatesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SslCertificateList( + items=[ + compute.SslCertificate(), + compute.SslCertificate(), + compute.SslCertificate(), + ], + next_page_token='abc', + ), + compute.SslCertificateList( + items=[], + next_page_token='def', + ), + compute.SslCertificateList( + items=[ + compute.SslCertificate(), + ], + next_page_token='ghi', + ), + compute.SslCertificateList( + items=[ + compute.SslCertificate(), + compute.SslCertificate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SslCertificateList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.SslCertificate) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionSslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.RegionSslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSslCertificatesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionSslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionSslCertificatesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionSslCertificatesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionSslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSslCertificatesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionSslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionSslCertificatesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionSslCertificatesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionSslCertificatesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_ssl_certificates_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionSslCertificatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_ssl_certificates_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_ssl_certificates.transports.RegionSslCertificatesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionSslCertificatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_ssl_certificates_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_ssl_certificates.transports.RegionSslCertificatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionSslCertificatesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_ssl_certificates_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_ssl_certificates.transports.RegionSslCertificatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionSslCertificatesTransport() + adc.assert_called_once() + + +def test_region_ssl_certificates_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionSslCertificatesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_ssl_certificates_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionSslCertificatesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_ssl_certificates_host_no_port(transport_name): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_ssl_certificates_host_with_port(transport_name): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_ssl_certificates_client_transport_session_collision(transport_name): + 
creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionSslCertificatesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionSslCertificatesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionSslCertificatesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionSslCertificatesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSslCertificatesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionSslCertificatesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionSslCertificatesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSslCertificatesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionSslCertificatesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionSslCertificatesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSslCertificatesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionSslCertificatesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionSslCertificatesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSslCertificatesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionSslCertificatesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionSslCertificatesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSslCertificatesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionSslCertificatesTransport, '_prep_wrapped_messages') as prep: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionSslCertificatesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionSslCertificatesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionSslCertificatesClient, transports.RegionSslCertificatesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_ssl_policies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_ssl_policies.py new file mode 100644 index 000000000..e99e38d26 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_ssl_policies.py @@ -0,0 +1,3329 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_ssl_policies import RegionSslPoliciesClient +from google.cloud.compute_v1.services.region_ssl_policies import pagers +from google.cloud.compute_v1.services.region_ssl_policies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionSslPoliciesClient._get_default_mtls_endpoint(None) is None + assert RegionSslPoliciesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionSslPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionSslPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionSslPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionSslPoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionSslPoliciesClient, "rest"), +]) +def test_region_ssl_policies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionSslPoliciesRestTransport, "rest"), +]) +def test_region_ssl_policies_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionSslPoliciesClient, "rest"), +]) +def test_region_ssl_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_ssl_policies_client_get_transport_class(): + transport = RegionSslPoliciesClient.get_transport_class() + available_transports = [ + transports.RegionSslPoliciesRestTransport, + ] + assert transport in available_transports + + transport = RegionSslPoliciesClient.get_transport_class("rest") + assert transport == transports.RegionSslPoliciesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionSslPoliciesClient, transports.RegionSslPoliciesRestTransport, 
"rest"), +]) +@mock.patch.object(RegionSslPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSslPoliciesClient)) +def test_region_ssl_policies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionSslPoliciesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionSslPoliciesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionSslPoliciesClient, transports.RegionSslPoliciesRestTransport, "rest", "true"), + (RegionSslPoliciesClient, transports.RegionSslPoliciesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionSslPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSslPoliciesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_ssl_policies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionSslPoliciesClient +]) +@mock.patch.object(RegionSslPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionSslPoliciesClient)) +def test_region_ssl_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionSslPoliciesClient, transports.RegionSslPoliciesRestTransport, "rest"), +]) +def test_region_ssl_policies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionSslPoliciesClient, transports.RegionSslPoliciesRestTransport, "rest", None), +]) +def test_region_ssl_policies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionSslPolicyRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionSslPolicyRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] 
= 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionSslPolicyRequest.pb(compute.DeleteRegionSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionSslPolicyRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionSslPolicyRequest(), + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ) + + +def test_delete_rest_error(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionSslPolicyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionSslPolicyRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionSslPolicyRequest.pb(compute.DeleteRegionSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionSslPolicyRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionSslPolicyRequest(), + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionSslPolicyRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslPolicy( + creation_timestamp='creation_timestamp_value', + custom_features=['custom_features_value'], + description='description_value', + enabled_features=['enabled_features_value'], + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + min_tls_version='min_tls_version_value', + name='name_value', + profile='profile_value', + region='region_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SslPolicy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.custom_features == ['custom_features_value'] + assert response.description == 'description_value' + assert response.enabled_features == ['enabled_features_value'] + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.min_tls_version == 'min_tls_version_value' + assert response.name == 'name_value' + assert response.profile == 'profile_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionSslPolicyRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert 
jsonified_request["region"] == 'region_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "sslPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionSslPolicyRequest.pb(compute.GetRegionSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslPolicy.to_json(compute.SslPolicy()) + + request = compute.GetRegionSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslPolicy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionSslPolicyRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionSslPolicyRequest(), + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ) + + +def test_get_rest_error(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionSslPolicyRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionSslPolicyRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionSslPolicyRequest.pb(compute.InsertRegionSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionSslPolicyRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertRegionSslPolicyRequest(), + project='project_value', + region='region_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionSslPolicyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionSslPolicyRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionSslPolicyRequest.pb(compute.InsertRegionSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionSslPolicyRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionSslPolicyRequest(), + project='project_value', + region='region_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionSslPoliciesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslPoliciesList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionSslPoliciesRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslPoliciesList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionSslPoliciesRequest.pb(compute.ListRegionSslPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslPoliciesList.to_json(compute.SslPoliciesList()) + + request = compute.ListRegionSslPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslPoliciesList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionSslPoliciesRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslPoliciesList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionSslPoliciesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SslPoliciesList( + items=[ + compute.SslPolicy(), + compute.SslPolicy(), + compute.SslPolicy(), + ], + next_page_token='abc', + ), + compute.SslPoliciesList( + items=[], + next_page_token='def', + ), + compute.SslPoliciesList( + items=[ + compute.SslPolicy(), + ], + next_page_token='ghi', + ), + compute.SslPoliciesList( + items=[ + compute.SslPolicy(), + compute.SslPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SslPoliciesList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.SslPolicy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListAvailableFeaturesRegionSslPoliciesRequest, + dict, +]) +def test_list_available_features_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslPoliciesListAvailableFeaturesResponse( + features=['features_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_available_features(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.SslPoliciesListAvailableFeaturesResponse) + assert response.features == ['features_value'] + + +def test_list_available_features_rest_required_fields(request_type=compute.ListAvailableFeaturesRegionSslPoliciesRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_available_features._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_available_features._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslPoliciesListAvailableFeaturesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_available_features(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_available_features_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.list_available_features._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_available_features_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_list_available_features") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_list_available_features") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.ListAvailableFeaturesRegionSslPoliciesRequest.pb(compute.ListAvailableFeaturesRegionSslPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslPoliciesListAvailableFeaturesResponse.to_json(compute.SslPoliciesListAvailableFeaturesResponse()) + + request = compute.ListAvailableFeaturesRegionSslPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslPoliciesListAvailableFeaturesResponse() + + client.list_available_features(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_available_features_rest_bad_request(transport: str = 'rest', request_type=compute.ListAvailableFeaturesRegionSslPoliciesRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_available_features(request) + + +def test_list_available_features_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslPoliciesListAvailableFeaturesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_available_features(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies/listAvailableFeatures" % client.transport._host, args[1]) + + +def test_list_available_features_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_available_features( + compute.ListAvailableFeaturesRegionSslPoliciesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_available_features_rest_error(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionSslPolicyRequest, + dict, +]) +def test_patch_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionSslPolicyRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslPolicy", "sslPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionSslPolicyRequest.pb(compute.PatchRegionSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionSslPolicyRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchRegionSslPolicyRequest(), + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionSslPolicyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionSslPolicyRequest): + transport_class = transports.RegionSslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionSslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "sslPolicy", "sslPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionSslPoliciesRestInterceptor(), + ) + client = RegionSslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionSslPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionSslPolicyRequest.pb(compute.PatchRegionSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionSslPolicyRequest): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'ssl_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionSslPolicyRequest(), + project='project_value', + region='region_value', + ssl_policy='ssl_policy_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSslPoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionSslPoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionSslPoliciesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSslPoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionSslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionSslPoliciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionSslPoliciesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionSslPoliciesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_ssl_policies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionSslPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_ssl_policies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_ssl_policies.transports.RegionSslPoliciesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionSslPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'list_available_features', + 'patch', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_ssl_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_ssl_policies.transports.RegionSslPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionSslPoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_ssl_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_ssl_policies.transports.RegionSslPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionSslPoliciesTransport() + adc.assert_called_once() + + +def test_region_ssl_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionSslPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_ssl_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionSslPoliciesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_ssl_policies_host_no_port(transport_name): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_ssl_policies_host_with_port(transport_name): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_ssl_policies_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionSslPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionSslPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_available_features._session + session2 = client2.transport.list_available_features._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionSslPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionSslPoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSslPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionSslPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionSslPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSslPoliciesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionSslPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionSslPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSslPoliciesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionSslPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionSslPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSslPoliciesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionSslPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionSslPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSslPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionSslPoliciesTransport, '_prep_wrapped_messages') as prep: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionSslPoliciesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionSslPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionSslPoliciesClient, transports.RegionSslPoliciesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py new file mode 100644 index 000000000..c48e93881 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py @@ -0,0 +1,3083 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_target_http_proxies import RegionTargetHttpProxiesClient +from google.cloud.compute_v1.services.region_target_http_proxies import pagers +from google.cloud.compute_v1.services.region_target_http_proxies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionTargetHttpProxiesClient._get_default_mtls_endpoint(None) is None + assert RegionTargetHttpProxiesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionTargetHttpProxiesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionTargetHttpProxiesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionTargetHttpProxiesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionTargetHttpProxiesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionTargetHttpProxiesClient, "rest"), +]) +def test_region_target_http_proxies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionTargetHttpProxiesRestTransport, "rest"), +]) +def 
test_region_target_http_proxies_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionTargetHttpProxiesClient, "rest"), +]) +def test_region_target_http_proxies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_target_http_proxies_client_get_transport_class(): + transport = RegionTargetHttpProxiesClient.get_transport_class() + available_transports = [ + transports.RegionTargetHttpProxiesRestTransport, + ] + assert transport in available_transports + + transport = RegionTargetHttpProxiesClient.get_transport_class("rest") + assert transport == transports.RegionTargetHttpProxiesRestTransport + 
+ +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionTargetHttpProxiesClient, transports.RegionTargetHttpProxiesRestTransport, "rest"), +]) +@mock.patch.object(RegionTargetHttpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetHttpProxiesClient)) +def test_region_target_http_proxies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionTargetHttpProxiesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionTargetHttpProxiesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionTargetHttpProxiesClient, transports.RegionTargetHttpProxiesRestTransport, "rest", "true"), + (RegionTargetHttpProxiesClient, transports.RegionTargetHttpProxiesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionTargetHttpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetHttpProxiesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_target_http_proxies_client_mtls_env_auto(client_class, transport_class, transport_name, 
use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionTargetHttpProxiesClient +]) +@mock.patch.object(RegionTargetHttpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetHttpProxiesClient)) +def test_region_target_http_proxies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionTargetHttpProxiesClient, transports.RegionTargetHttpProxiesRestTransport, "rest"), +]) +def test_region_target_http_proxies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionTargetHttpProxiesClient, transports.RegionTargetHttpProxiesRestTransport, "rest", None), +]) +def test_region_target_http_proxies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionTargetHttpProxyRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionTargetHttpProxyRequest): + transport_class = transports.RegionTargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionTargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpProxiesRestInterceptor(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionTargetHttpProxyRequest.pb(compute.DeleteRegionTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionTargetHttpProxyRequest): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionTargetHttpProxyRequest(), + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + ) + + +def test_delete_rest_error(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionTargetHttpProxyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionTargetHttpProxyRequest): + transport_class = transports.RegionTargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionTargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpProxiesRestInterceptor(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionTargetHttpProxyRequest.pb(compute.DeleteRegionTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionTargetHttpProxyRequest): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionTargetHttpProxyRequest(), + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionTargetHttpProxyRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpProxy( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + http_keep_alive_timeout_sec=2868, + id=205, + kind='kind_value', + name='name_value', + proxy_bind=True, + region='region_value', + self_link='self_link_value', + url_map='url_map_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.TargetHttpProxy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.http_keep_alive_timeout_sec == 2868 + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.proxy_bind is True + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.url_map == 'url_map_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionTargetHttpProxyRequest): + transport_class = transports.RegionTargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpProxy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionTargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "targetHttpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpProxiesRestInterceptor(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionTargetHttpProxyRequest.pb(compute.GetRegionTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpProxy.to_json(compute.TargetHttpProxy()) + + request = compute.GetRegionTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpProxy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionTargetHttpProxyRequest): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpProxy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionTargetHttpProxyRequest(), + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + ) + + +def test_get_rest_error(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionTargetHttpProxyRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionTargetHttpProxyRequest): + transport_class = transports.RegionTargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionTargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpProxiesRestInterceptor(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionTargetHttpProxyRequest.pb(compute.InsertRegionTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionTargetHttpProxyRequest): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionTargetHttpProxyRequest(), + project='project_value', + region='region_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionTargetHttpProxyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionTargetHttpProxyRequest): + transport_class = transports.RegionTargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionTargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpProxiesRestInterceptor(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionTargetHttpProxyRequest.pb(compute.InsertRegionTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionTargetHttpProxyRequest): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionTargetHttpProxyRequest(), + project='project_value', + region='region_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionTargetHttpProxiesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpProxyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionTargetHttpProxiesRequest): + transport_class = transports.RegionTargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpProxyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionTargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpProxiesRestInterceptor(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionTargetHttpProxiesRequest.pb(compute.ListRegionTargetHttpProxiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpProxyList.to_json(compute.TargetHttpProxyList()) + + request = compute.ListRegionTargetHttpProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpProxyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionTargetHttpProxiesRequest): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpProxyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionTargetHttpProxiesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetHttpProxyList( + items=[ + compute.TargetHttpProxy(), + compute.TargetHttpProxy(), + compute.TargetHttpProxy(), + ], + next_page_token='abc', + ), + compute.TargetHttpProxyList( + items=[], + next_page_token='def', + ), + compute.TargetHttpProxyList( + items=[ + compute.TargetHttpProxy(), + ], + next_page_token='ghi', + ), + compute.TargetHttpProxyList( + items=[ + compute.TargetHttpProxy(), + compute.TargetHttpProxy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetHttpProxyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetHttpProxy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetUrlMapRegionTargetHttpProxyRequest, + dict, +]) +def test_set_url_map_rest(request_type): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_url_map(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_url_map_rest_required_fields(request_type=compute.SetUrlMapRegionTargetHttpProxyRequest): + transport_class = transports.RegionTargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + 
+ jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_url_map(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_url_map_rest_unset_required_fields(): + transport = transports.RegionTargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_url_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpProxy", "urlMapReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_url_map_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpProxiesRestInterceptor(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "post_set_url_map") as post, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "pre_set_url_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUrlMapRegionTargetHttpProxyRequest.pb(compute.SetUrlMapRegionTargetHttpProxyRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUrlMapRegionTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_url_map(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_url_map_rest_bad_request(transport: str = 'rest', request_type=compute.SetUrlMapRegionTargetHttpProxyRequest): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map(request) + + +def test_set_url_map_rest_flattened(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_url_map(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}/setUrlMap" % client.transport._host, args[1]) + + +def test_set_url_map_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_url_map( + compute.SetUrlMapRegionTargetHttpProxyRequest(), + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + + +def test_set_url_map_rest_error(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetUrlMapRegionTargetHttpProxyRequest, + dict, +]) +def test_set_url_map_unary_rest(request_type): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_url_map_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_url_map_unary_rest_required_fields(request_type=compute.SetUrlMapRegionTargetHttpProxyRequest): + transport_class = transports.RegionTargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_url_map_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_url_map_unary_rest_unset_required_fields(): + transport = transports.RegionTargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_url_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpProxy", "urlMapReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_url_map_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpProxiesRestInterceptor(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "post_set_url_map") as post, \ + mock.patch.object(transports.RegionTargetHttpProxiesRestInterceptor, "pre_set_url_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUrlMapRegionTargetHttpProxyRequest.pb(compute.SetUrlMapRegionTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUrlMapRegionTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_url_map_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_url_map_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetUrlMapRegionTargetHttpProxyRequest): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map_unary(request) + + +def test_set_url_map_unary_rest_flattened(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_http_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_url_map_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}/setUrlMap" % client.transport._host, args[1]) + + +def test_set_url_map_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_url_map_unary( + compute.SetUrlMapRegionTargetHttpProxyRequest(), + project='project_value', + region='region_value', + target_http_proxy='target_http_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + + +def test_set_url_map_unary_rest_error(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetHttpProxiesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionTargetHttpProxiesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionTargetHttpProxiesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetHttpProxiesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionTargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionTargetHttpProxiesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionTargetHttpProxiesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionTargetHttpProxiesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_target_http_proxies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionTargetHttpProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_target_http_proxies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_target_http_proxies.transports.RegionTargetHttpProxiesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionTargetHttpProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'set_url_map', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_target_http_proxies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_target_http_proxies.transports.RegionTargetHttpProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionTargetHttpProxiesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_target_http_proxies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_target_http_proxies.transports.RegionTargetHttpProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionTargetHttpProxiesTransport() + adc.assert_called_once() + + +def test_region_target_http_proxies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionTargetHttpProxiesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_target_http_proxies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionTargetHttpProxiesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_target_http_proxies_host_no_port(transport_name): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_target_http_proxies_host_with_port(transport_name): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_region_target_http_proxies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionTargetHttpProxiesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionTargetHttpProxiesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_url_map._session + session2 = client2.transport.set_url_map._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionTargetHttpProxiesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionTargetHttpProxiesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionTargetHttpProxiesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionTargetHttpProxiesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionTargetHttpProxiesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionTargetHttpProxiesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionTargetHttpProxiesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionTargetHttpProxiesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionTargetHttpProxiesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionTargetHttpProxiesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionTargetHttpProxiesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionTargetHttpProxiesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionTargetHttpProxiesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionTargetHttpProxiesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionTargetHttpProxiesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionTargetHttpProxiesTransport, '_prep_wrapped_messages') as prep: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionTargetHttpProxiesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionTargetHttpProxiesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionTargetHttpProxiesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionTargetHttpProxiesClient, transports.RegionTargetHttpProxiesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py new file mode 100644 index 000000000..69a42699b --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py @@ -0,0 +1,4231 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_target_https_proxies import RegionTargetHttpsProxiesClient +from google.cloud.compute_v1.services.region_target_https_proxies import pagers +from google.cloud.compute_v1.services.region_target_https_proxies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert 
bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionTargetHttpsProxiesClient._get_default_mtls_endpoint(None) is None + assert RegionTargetHttpsProxiesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionTargetHttpsProxiesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionTargetHttpsProxiesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionTargetHttpsProxiesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionTargetHttpsProxiesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionTargetHttpsProxiesClient, "rest"), +]) +def test_region_target_https_proxies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 
'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionTargetHttpsProxiesRestTransport, "rest"), +]) +def test_region_target_https_proxies_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionTargetHttpsProxiesClient, "rest"), +]) +def test_region_target_https_proxies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_target_https_proxies_client_get_transport_class(): + transport = RegionTargetHttpsProxiesClient.get_transport_class() + available_transports = [ + transports.RegionTargetHttpsProxiesRestTransport, + ] + assert 
transport in available_transports + + transport = RegionTargetHttpsProxiesClient.get_transport_class("rest") + assert transport == transports.RegionTargetHttpsProxiesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionTargetHttpsProxiesClient, transports.RegionTargetHttpsProxiesRestTransport, "rest"), +]) +@mock.patch.object(RegionTargetHttpsProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetHttpsProxiesClient)) +def test_region_target_https_proxies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionTargetHttpsProxiesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionTargetHttpsProxiesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionTargetHttpsProxiesClient, transports.RegionTargetHttpsProxiesRestTransport, "rest", "true"), + (RegionTargetHttpsProxiesClient, transports.RegionTargetHttpsProxiesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionTargetHttpsProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetHttpsProxiesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_target_https_proxies_client_mtls_env_auto(client_class, transport_class, transport_name, 
use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionTargetHttpsProxiesClient +]) +@mock.patch.object(RegionTargetHttpsProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetHttpsProxiesClient)) +def test_region_target_https_proxies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionTargetHttpsProxiesClient, transports.RegionTargetHttpsProxiesRestTransport, "rest"), +]) +def test_region_target_https_proxies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionTargetHttpsProxiesClient, transports.RegionTargetHttpsProxiesRestTransport, "rest", None), +]) +def test_region_target_https_proxies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionTargetHttpsProxyRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionTargetHttpsProxyRequest.pb(compute.DeleteRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + ) + + +def test_delete_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionTargetHttpsProxyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionTargetHttpsProxyRequest.pb(compute.DeleteRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionTargetHttpsProxyRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxy( + authorization_policy='authorization_policy_value', + certificate_map='certificate_map_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + http_keep_alive_timeout_sec=2868, + id=205, + kind='kind_value', + name='name_value', + proxy_bind=True, + quic_override='quic_override_value', + region='region_value', + self_link='self_link_value', + server_tls_policy='server_tls_policy_value', + ssl_certificates=['ssl_certificates_value'], + ssl_policy='ssl_policy_value', + url_map='url_map_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetHttpsProxy) + assert response.authorization_policy == 'authorization_policy_value' + assert response.certificate_map == 'certificate_map_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.http_keep_alive_timeout_sec == 2868 + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.proxy_bind is True + assert response.quic_override == 'quic_override_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.server_tls_policy == 'server_tls_policy_value' + assert response.ssl_certificates == ['ssl_certificates_value'] + assert response.ssl_policy == 'ssl_policy_value' + assert response.url_map == 'url_map_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpsProxy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionTargetHttpsProxyRequest.pb(compute.GetRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpsProxy.to_json(compute.TargetHttpsProxy()) + + request = compute.GetRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpsProxy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + ) + + +def test_get_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionTargetHttpsProxyRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpsProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionTargetHttpsProxyRequest.pb(compute.InsertRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + + +def test_insert_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionTargetHttpsProxyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpsProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionTargetHttpsProxyRequest.pb(compute.InsertRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 
'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionTargetHttpsProxiesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionTargetHttpsProxiesRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpsProxyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionTargetHttpsProxiesRequest.pb(compute.ListRegionTargetHttpsProxiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpsProxyList.to_json(compute.TargetHttpsProxyList()) + + request = compute.ListRegionTargetHttpsProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpsProxyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionTargetHttpsProxiesRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionTargetHttpsProxiesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetHttpsProxyList( + items=[ + compute.TargetHttpsProxy(), + compute.TargetHttpsProxy(), + compute.TargetHttpsProxy(), + ], + next_page_token='abc', + ), + compute.TargetHttpsProxyList( + items=[], + next_page_token='def', + ), + compute.TargetHttpsProxyList( + items=[ + compute.TargetHttpsProxy(), + ], + next_page_token='ghi', + ), + compute.TargetHttpsProxyList( + items=[ + compute.TargetHttpsProxy(), + compute.TargetHttpsProxy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetHttpsProxyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetHttpsProxy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionTargetHttpsProxyRequest, + dict, +]) +def test_patch_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 
'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpsProxy", "targetHttpsProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionTargetHttpsProxyRequest.pb(compute.PatchRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + + +def test_patch_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionTargetHttpsProxyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 
'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpsProxy", "targetHttpsProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionTargetHttpsProxyRequest.pb(compute.PatchRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 
'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + + +def test_patch_unary_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslCertificatesRegionTargetHttpsProxyRequest, + dict, +]) +def test_set_ssl_certificates_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["region_target_https_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_certificates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_ssl_certificates_rest_required_fields(request_type=compute.SetSslCertificatesRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_certificates(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_certificates_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_certificates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionTargetHttpsProxiesSetSslCertificatesRequestResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_certificates_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_set_ssl_certificates") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_set_ssl_certificates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetSslCertificatesRegionTargetHttpsProxyRequest.pb(compute.SetSslCertificatesRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslCertificatesRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_certificates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_certificates_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslCertificatesRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["region_target_https_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates(request) + + +def test_set_ssl_certificates_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + region_target_https_proxies_set_ssl_certificates_request_resource=compute.RegionTargetHttpsProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_certificates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setSslCertificates" % client.transport._host, args[1]) + + +def test_set_ssl_certificates_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_ssl_certificates( + compute.SetSslCertificatesRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + region_target_https_proxies_set_ssl_certificates_request_resource=compute.RegionTargetHttpsProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + + +def test_set_ssl_certificates_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslCertificatesRegionTargetHttpsProxyRequest, + dict, +]) +def test_set_ssl_certificates_unary_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["region_target_https_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_certificates_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_ssl_certificates_unary_rest_required_fields(request_type=compute.SetSslCertificatesRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_certificates_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_certificates_unary_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_certificates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionTargetHttpsProxiesSetSslCertificatesRequestResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_certificates_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_set_ssl_certificates") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_set_ssl_certificates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetSslCertificatesRegionTargetHttpsProxyRequest.pb(compute.SetSslCertificatesRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslCertificatesRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_certificates_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_certificates_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslCertificatesRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["region_target_https_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates_unary(request) + + +def test_set_ssl_certificates_unary_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + region_target_https_proxies_set_ssl_certificates_request_resource=compute.RegionTargetHttpsProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_certificates_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setSslCertificates" % client.transport._host, args[1]) + + +def test_set_ssl_certificates_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_ssl_certificates_unary( + compute.SetSslCertificatesRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + region_target_https_proxies_set_ssl_certificates_request_resource=compute.RegionTargetHttpsProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + + +def test_set_ssl_certificates_unary_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetUrlMapRegionTargetHttpsProxyRequest, + dict, +]) +def test_set_url_map_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_url_map(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_url_map_rest_required_fields(request_type=compute.SetUrlMapRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now 
present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_url_map(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_url_map_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_url_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpsProxy", "urlMapReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_url_map_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_set_url_map") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_set_url_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUrlMapRegionTargetHttpsProxyRequest.pb(compute.SetUrlMapRegionTargetHttpsProxyRequest()) + transcode.return_value = { 
+ "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUrlMapRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_url_map(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_url_map_rest_bad_request(transport: str = 'rest', request_type=compute.SetUrlMapRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map(request) + + +def test_set_url_map_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_url_map(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setUrlMap" % client.transport._host, args[1]) + + +def test_set_url_map_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_url_map( + compute.SetUrlMapRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + + +def test_set_url_map_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetUrlMapRegionTargetHttpsProxyRequest, + dict, +]) +def test_set_url_map_unary_rest(request_type): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_url_map_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_url_map_unary_rest_required_fields(request_type=compute.SetUrlMapRegionTargetHttpsProxyRequest): + transport_class = transports.RegionTargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_url_map_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_url_map_unary_rest_unset_required_fields(): + transport = transports.RegionTargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_url_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetHttpsProxy", "urlMapReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_url_map_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetHttpsProxiesRestInterceptor(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + 
with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "post_set_url_map") as post, \ + mock.patch.object(transports.RegionTargetHttpsProxiesRestInterceptor, "pre_set_url_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUrlMapRegionTargetHttpsProxyRequest.pb(compute.SetUrlMapRegionTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUrlMapRegionTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_url_map_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_url_map_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetUrlMapRegionTargetHttpsProxyRequest): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map_unary(request) + + +def test_set_url_map_unary_rest_flattened(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_https_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_url_map_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setUrlMap" % client.transport._host, args[1]) + + +def test_set_url_map_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_url_map_unary( + compute.SetUrlMapRegionTargetHttpsProxyRequest(), + project='project_value', + region='region_value', + target_https_proxy='target_https_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + + +def test_set_url_map_unary_rest_error(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetHttpsProxiesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionTargetHttpsProxiesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionTargetHttpsProxiesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetHttpsProxiesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionTargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionTargetHttpsProxiesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionTargetHttpsProxiesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+    # (continuation of test_transport_adc, begun above) — patching
+    # google.auth.default proves ADC is consulted when no credentials are
+    # passed to the transport constructor.
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_transport_kind(transport_name):
+    # The transport's `kind` property must echo the name it was looked up by.
+    transport = RegionTargetHttpsProxiesClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_region_target_https_proxies_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    # (the two are mutually exclusive ways of supplying credentials).
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.RegionTargetHttpsProxiesTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_region_target_https_proxies_base_transport():
+    # Instantiate the base transport.
+    # __init__ is patched out so the abstract base can be constructed directly.
+    with mock.patch('google.cloud.compute_v1.services.region_target_https_proxies.transports.RegionTargetHttpsProxiesTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.RegionTargetHttpsProxiesTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'set_ssl_certificates', + 'set_url_map', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_target_https_proxies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_target_https_proxies.transports.RegionTargetHttpsProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionTargetHttpsProxiesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_target_https_proxies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_target_https_proxies.transports.RegionTargetHttpsProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionTargetHttpsProxiesTransport() + adc.assert_called_once() + + +def test_region_target_https_proxies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionTargetHttpsProxiesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_target_https_proxies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionTargetHttpsProxiesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_target_https_proxies_host_no_port(transport_name): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_region_target_https_proxies_host_with_port(transport_name): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_target_https_proxies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionTargetHttpsProxiesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionTargetHttpsProxiesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_ssl_certificates._session + session2 = client2.transport.set_ssl_certificates._session + assert session1 != session2 + session1 = client1.transport.set_url_map._session + session2 = client2.transport.set_url_map._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = 
RegionTargetHttpsProxiesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionTargetHttpsProxiesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionTargetHttpsProxiesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionTargetHttpsProxiesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionTargetHttpsProxiesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionTargetHttpsProxiesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionTargetHttpsProxiesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionTargetHttpsProxiesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionTargetHttpsProxiesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionTargetHttpsProxiesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionTargetHttpsProxiesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionTargetHttpsProxiesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionTargetHttpsProxiesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionTargetHttpsProxiesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionTargetHttpsProxiesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionTargetHttpsProxiesTransport, '_prep_wrapped_messages') as prep: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionTargetHttpsProxiesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionTargetHttpsProxiesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionTargetHttpsProxiesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionTargetHttpsProxiesClient, transports.RegionTargetHttpsProxiesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py new file mode 100644 index 000000000..218d92ec4 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py @@ -0,0 +1,2513 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_target_tcp_proxies import RegionTargetTcpProxiesClient +from google.cloud.compute_v1.services.region_target_tcp_proxies import pagers +from google.cloud.compute_v1.services.region_target_tcp_proxies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", 
b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionTargetTcpProxiesClient._get_default_mtls_endpoint(None) is None + assert RegionTargetTcpProxiesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionTargetTcpProxiesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionTargetTcpProxiesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionTargetTcpProxiesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionTargetTcpProxiesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionTargetTcpProxiesClient, "rest"), +]) +def test_region_target_tcp_proxies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 
'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionTargetTcpProxiesRestTransport, "rest"), +]) +def test_region_target_tcp_proxies_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionTargetTcpProxiesClient, "rest"), +]) +def test_region_target_tcp_proxies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_target_tcp_proxies_client_get_transport_class(): + transport = RegionTargetTcpProxiesClient.get_transport_class() + available_transports = [ + transports.RegionTargetTcpProxiesRestTransport, + ] + assert transport in 
available_transports + + transport = RegionTargetTcpProxiesClient.get_transport_class("rest") + assert transport == transports.RegionTargetTcpProxiesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionTargetTcpProxiesClient, transports.RegionTargetTcpProxiesRestTransport, "rest"), +]) +@mock.patch.object(RegionTargetTcpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetTcpProxiesClient)) +def test_region_target_tcp_proxies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionTargetTcpProxiesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionTargetTcpProxiesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionTargetTcpProxiesClient, transports.RegionTargetTcpProxiesRestTransport, "rest", "true"), + (RegionTargetTcpProxiesClient, transports.RegionTargetTcpProxiesRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionTargetTcpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetTcpProxiesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_target_tcp_proxies_client_mtls_env_auto(client_class, transport_class, transport_name, 
use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionTargetTcpProxiesClient +]) +@mock.patch.object(RegionTargetTcpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionTargetTcpProxiesClient)) +def test_region_target_tcp_proxies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionTargetTcpProxiesClient, transports.RegionTargetTcpProxiesRestTransport, "rest"), +]) +def test_region_target_tcp_proxies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionTargetTcpProxiesClient, transports.RegionTargetTcpProxiesRestTransport, "rest", None), +]) +def test_region_target_tcp_proxies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionTargetTcpProxyRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionTargetTcpProxyRequest): + transport_class = transports.RegionTargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionTargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetTcpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetTcpProxiesRestInterceptor(), + ) + client = RegionTargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionTargetTcpProxyRequest.pb(compute.DeleteRegionTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionTargetTcpProxyRequest): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetTcpProxies/{target_tcp_proxy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionTargetTcpProxyRequest(), + project='project_value', + region='region_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + + +def test_delete_rest_error(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionTargetTcpProxyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionTargetTcpProxyRequest): + transport_class = transports.RegionTargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionTargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetTcpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetTcpProxiesRestInterceptor(), + ) + client = RegionTargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionTargetTcpProxyRequest.pb(compute.DeleteRegionTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionTargetTcpProxyRequest): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetTcpProxies/{target_tcp_proxy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionTargetTcpProxyRequest(), + project='project_value', + region='region_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionTargetTcpProxyRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetTcpProxy( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + proxy_bind=True, + proxy_header='proxy_header_value', + region='region_value', + self_link='self_link_value', + service='service_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.TargetTcpProxy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.proxy_bind is True + assert response.proxy_header == 'proxy_header_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.service == 'service_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionTargetTcpProxyRequest): + transport_class = transports.RegionTargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionTargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "targetTcpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetTcpProxiesRestInterceptor(), + ) + client = RegionTargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionTargetTcpProxyRequest.pb(compute.GetRegionTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetTcpProxy.to_json(compute.TargetTcpProxy()) + + request = compute.GetRegionTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetTcpProxy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionTargetTcpProxyRequest): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetTcpProxy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_tcp_proxy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetTcpProxies/{target_tcp_proxy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionTargetTcpProxyRequest(), + project='project_value', + region='region_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + + +def test_get_rest_error(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionTargetTcpProxyRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_tcp_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'proxy_header': 'proxy_header_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionTargetTcpProxyRequest): + transport_class = transports.RegionTargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionTargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetTcpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetTcpProxiesRestInterceptor(), + ) + client = RegionTargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionTargetTcpProxyRequest.pb(compute.InsertRegionTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionTargetTcpProxyRequest): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_tcp_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'proxy_header': 'proxy_header_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_tcp_proxy_resource=compute.TargetTcpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetTcpProxies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionTargetTcpProxyRequest(), + project='project_value', + region='region_value', + target_tcp_proxy_resource=compute.TargetTcpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionTargetTcpProxyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_tcp_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'proxy_header': 'proxy_header_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionTargetTcpProxyRequest): + transport_class = transports.RegionTargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionTargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetTcpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetTcpProxiesRestInterceptor(), + ) + client = RegionTargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionTargetTcpProxyRequest.pb(compute.InsertRegionTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionTargetTcpProxyRequest): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_tcp_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'proxy_header': 'proxy_header_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_tcp_proxy_resource=compute.TargetTcpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetTcpProxies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionTargetTcpProxyRequest(), + project='project_value', + region='region_value', + target_tcp_proxy_resource=compute.TargetTcpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionTargetTcpProxiesRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetTcpProxyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionTargetTcpProxiesRequest): + transport_class = transports.RegionTargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+    # The only fields the transport may leave unset are the optional
+    # List query parameters; required path params must never appear here.
+    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == 'region_value'
+
+    client = RegionTargetTcpProxiesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.TargetTcpProxyList()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionTargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionTargetTcpProxiesRestInterceptor(), + ) + client = RegionTargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionTargetTcpProxiesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionTargetTcpProxiesRequest.pb(compute.ListRegionTargetTcpProxiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetTcpProxyList.to_json(compute.TargetTcpProxyList()) + + request = compute.ListRegionTargetTcpProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetTcpProxyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionTargetTcpProxiesRequest): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetTcpProxyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetTcpProxies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionTargetTcpProxiesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetTcpProxyList( + items=[ + compute.TargetTcpProxy(), + compute.TargetTcpProxy(), + compute.TargetTcpProxy(), + ], + next_page_token='abc', + ), + compute.TargetTcpProxyList( + items=[], + next_page_token='def', + ), + compute.TargetTcpProxyList( + items=[ + compute.TargetTcpProxy(), + ], + next_page_token='ghi', + ), + compute.TargetTcpProxyList( + items=[ + compute.TargetTcpProxy(), + compute.TargetTcpProxy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetTcpProxyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetTcpProxy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetTcpProxiesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionTargetTcpProxiesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionTargetTcpProxiesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionTargetTcpProxiesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionTargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionTargetTcpProxiesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionTargetTcpProxiesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        # Constructing the transport with no credentials must fall back to ADC.
+        transport_class()
+        adc.assert_called_once()
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_transport_kind(transport_name):
+    # The transport's `kind` property should echo the registered transport name.
+    transport = RegionTargetTcpProxiesClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_region_target_tcp_proxies_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.RegionTargetTcpProxiesTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_region_target_tcp_proxies_base_transport():
+    # Instantiate the base transport.
+    # __init__ is patched out so the abstract base can be constructed directly.
+    with mock.patch('google.cloud.compute_v1.services.region_target_tcp_proxies.transports.RegionTargetTcpProxiesTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.RegionTargetTcpProxiesTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_target_tcp_proxies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_target_tcp_proxies.transports.RegionTargetTcpProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionTargetTcpProxiesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_target_tcp_proxies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_target_tcp_proxies.transports.RegionTargetTcpProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionTargetTcpProxiesTransport() + adc.assert_called_once() + + +def test_region_target_tcp_proxies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionTargetTcpProxiesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_target_tcp_proxies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionTargetTcpProxiesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_target_tcp_proxies_host_no_port(transport_name): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_target_tcp_proxies_host_with_port(transport_name): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_region_target_tcp_proxies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionTargetTcpProxiesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionTargetTcpProxiesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionTargetTcpProxiesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionTargetTcpProxiesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionTargetTcpProxiesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionTargetTcpProxiesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionTargetTcpProxiesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+    actual = RegionTargetTcpProxiesClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    # Build-and-parse round trip for the organization resource path helper.
+    organization = "oyster"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = RegionTargetTcpProxiesClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = RegionTargetTcpProxiesClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = RegionTargetTcpProxiesClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    # Build-and-parse round trip for the project resource path helper.
+    project = "cuttlefish"
+    expected = "projects/{project}".format(project=project, )
+    actual = RegionTargetTcpProxiesClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = RegionTargetTcpProxiesClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = RegionTargetTcpProxiesClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    # Build-and-parse round trip for the project/location resource path helper.
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = RegionTargetTcpProxiesClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = RegionTargetTcpProxiesClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = RegionTargetTcpProxiesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionTargetTcpProxiesTransport, '_prep_wrapped_messages') as prep: + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionTargetTcpProxiesTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionTargetTcpProxiesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionTargetTcpProxiesClient, transports.RegionTargetTcpProxiesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_url_maps.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_url_maps.py new file mode 100644 index 000000000..eb7e81699 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_region_url_maps.py @@ -0,0 +1,3898 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.region_url_maps import RegionUrlMapsClient +from google.cloud.compute_v1.services.region_url_maps import pagers +from google.cloud.compute_v1.services.region_url_maps import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionUrlMapsClient._get_default_mtls_endpoint(None) is None + assert RegionUrlMapsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionUrlMapsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionUrlMapsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionUrlMapsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionUrlMapsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionUrlMapsClient, "rest"), +]) +def test_region_url_maps_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionUrlMapsRestTransport, "rest"), +]) +def test_region_url_maps_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionUrlMapsClient, "rest"), +]) +def test_region_url_maps_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_region_url_maps_client_get_transport_class(): + transport = RegionUrlMapsClient.get_transport_class() + available_transports = [ + transports.RegionUrlMapsRestTransport, + ] + assert transport in available_transports + + transport = RegionUrlMapsClient.get_transport_class("rest") + assert transport == transports.RegionUrlMapsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionUrlMapsClient, transports.RegionUrlMapsRestTransport, "rest"), +]) +@mock.patch.object(RegionUrlMapsClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(RegionUrlMapsClient)) +def test_region_url_maps_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionUrlMapsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionUrlMapsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionUrlMapsClient, transports.RegionUrlMapsRestTransport, "rest", "true"), + (RegionUrlMapsClient, transports.RegionUrlMapsRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionUrlMapsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionUrlMapsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_url_maps_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RegionUrlMapsClient +]) +@mock.patch.object(RegionUrlMapsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionUrlMapsClient)) +def test_region_url_maps_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionUrlMapsClient, transports.RegionUrlMapsRestTransport, "rest"), +]) +def test_region_url_maps_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RegionUrlMapsClient, transports.RegionUrlMapsRestTransport, "rest", None), +]) +def test_region_url_maps_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionUrlMapRequest, + dict, +]) +def test_delete_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionUrlMapRequest.pb(compute.DeleteRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map='url_map_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map='url_map_value', + ) + + +def test_delete_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRegionUrlMapRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRegionUrlMapRequest.pb(compute.DeleteRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map='url_map_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map='url_map_value', + ) + + +def test_delete_unary_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRegionUrlMapRequest, + dict, +]) +def test_get_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMap( + creation_timestamp='creation_timestamp_value', + default_service='default_service_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.UrlMap) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.default_service == 'default_service_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = RegionUrlMapsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.UrlMap() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRegionUrlMapRequest.pb(compute.GetRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.UrlMap.to_json(compute.UrlMap()) + + request = compute.GetRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.UrlMap() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMap() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map='url_map_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map='url_map_value', + ) + + +def test_get_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionUrlMapRequest, + dict, +]) +def test_insert_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 
'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 
'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionUrlMapRequest.pb(compute.InsertRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 
'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 
'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRegionUrlMapRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': 
{'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 
'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that 
the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, 
"post_insert") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRegionUrlMapRequest.pb(compute.InsertRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 
'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 
'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRegionUrlMapsRequest, + dict, +]) +def test_list_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRegionUrlMapsRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.UrlMapList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRegionUrlMapsRequest.pb(compute.ListRegionUrlMapsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.UrlMapList.to_json(compute.UrlMapList()) + + request = compute.ListRegionUrlMapsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.UrlMapList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionUrlMapsRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionUrlMapsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.UrlMapList( + items=[ + compute.UrlMap(), + compute.UrlMap(), + compute.UrlMap(), + ], + next_page_token='abc', + ), + compute.UrlMapList( + items=[], + next_page_token='def', + ), + compute.UrlMapList( + items=[ + compute.UrlMap(), + ], + next_page_token='ghi', + ), + compute.UrlMapList( + items=[ + compute.UrlMap(), + compute.UrlMap(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.UrlMapList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.UrlMap) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionUrlMapRequest, + dict, +]) +def test_patch_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': 
['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 
'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "urlMap", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionUrlMapRequest.pb(compute.PatchRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 
'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 
'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRegionUrlMapRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 
'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 
'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.patch_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "urlMap", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRegionUrlMapRequest.pb(compute.PatchRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 
'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 
'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionUrlMapRequest, + dict, +]) +def test_update_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 
'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 
'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.update(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "urlMap", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionUrlMapRequest.pb(compute.UpdateRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 
'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 
'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_update_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateRegionUrlMapRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 
'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 
'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.update_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "urlMap", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateRegionUrlMapRequest.pb(compute.UpdateRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 
1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': 
{}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_unary( + compute.UpdateRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_update_unary_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ValidateRegionUrlMapRequest, + dict, +]) +def test_validate_rest(request_type): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["region_url_maps_validate_request_resource"] = {'resource': {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 
'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 
'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapsValidateResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.validate(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.UrlMapsValidateResponse) + + +def test_validate_rest_required_fields(request_type=compute.ValidateRegionUrlMapRequest): + transport_class = transports.RegionUrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).validate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).validate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapsValidateResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.validate(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_validate_rest_unset_required_fields(): + transport = transports.RegionUrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.validate._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "regionUrlMapsValidateRequestResource", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_rest_interceptors(null_interceptor): + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RegionUrlMapsRestInterceptor(), + ) + client = RegionUrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RegionUrlMapsRestInterceptor, "post_validate") as post, \ + mock.patch.object(transports.RegionUrlMapsRestInterceptor, "pre_validate") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ValidateRegionUrlMapRequest.pb(compute.ValidateRegionUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.UrlMapsValidateResponse.to_json(compute.UrlMapsValidateResponse()) + + request = compute.ValidateRegionUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.UrlMapsValidateResponse() + + client.validate(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_validate_rest_bad_request(transport: str = 'rest', request_type=compute.ValidateRegionUrlMapRequest): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + request_init["region_url_maps_validate_request_resource"] = {'resource': {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': 
{'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 
'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.validate(request) + + +def test_validate_rest_flattened(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapsValidateResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'url_map': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + url_map='url_map_value', + region_url_maps_validate_request_resource=compute.RegionUrlMapsValidateRequest(resource=compute.UrlMap(creation_timestamp='creation_timestamp_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.validate(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}/validate" % client.transport._host, args[1]) + + +def test_validate_rest_flattened_error(transport: str = 'rest'): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.validate( + compute.ValidateRegionUrlMapRequest(), + project='project_value', + region='region_value', + url_map='url_map_value', + region_url_maps_validate_request_resource=compute.RegionUrlMapsValidateRequest(resource=compute.UrlMap(creation_timestamp='creation_timestamp_value')), + ) + + +def test_validate_rest_error(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionUrlMapsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionUrlMapsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionUrlMapsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionUrlMapsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionUrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionUrlMapsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RegionUrlMapsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RegionUrlMapsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_region_url_maps_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionUrlMapsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_region_url_maps_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.region_url_maps.transports.RegionUrlMapsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RegionUrlMapsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'update', + 'validate', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_url_maps_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.region_url_maps.transports.RegionUrlMapsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionUrlMapsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_region_url_maps_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.region_url_maps.transports.RegionUrlMapsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionUrlMapsTransport() + adc.assert_called_once() + + +def test_region_url_maps_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionUrlMapsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_region_url_maps_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RegionUrlMapsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_url_maps_host_no_port(transport_name): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_url_maps_host_with_port(transport_name): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_region_url_maps_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = RegionUrlMapsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionUrlMapsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + session1 = client1.transport.validate._session + session2 = client2.transport.validate._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RegionUrlMapsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionUrlMapsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionUrlMapsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RegionUrlMapsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionUrlMapsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionUrlMapsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RegionUrlMapsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionUrlMapsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionUrlMapsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RegionUrlMapsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionUrlMapsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionUrlMapsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RegionUrlMapsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionUrlMapsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RegionUrlMapsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RegionUrlMapsTransport, '_prep_wrapped_messages') as prep: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RegionUrlMapsTransport, '_prep_wrapped_messages') as prep: + transport_class = RegionUrlMapsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying 
transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionUrlMapsClient, transports.RegionUrlMapsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_regions.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_regions.py new file mode 100644 index 000000000..7675449da --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_regions.py @@ -0,0 +1,1385 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.regions import RegionsClient +from google.cloud.compute_v1.services.regions import pagers +from google.cloud.compute_v1.services.regions import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RegionsClient._get_default_mtls_endpoint(None) is None + assert RegionsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RegionsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RegionsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RegionsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RegionsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionsClient, "rest"), +]) +def test_regions_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RegionsRestTransport, "rest"), +]) +def test_regions_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RegionsClient, "rest"), +]) +def test_regions_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_regions_client_get_transport_class(): + transport = RegionsClient.get_transport_class() + available_transports = [ + transports.RegionsRestTransport, + ] + assert transport in available_transports + + transport = RegionsClient.get_transport_class("rest") + assert transport == transports.RegionsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RegionsClient, transports.RegionsRestTransport, "rest"), +]) +@mock.patch.object(RegionsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionsClient)) +def test_regions_client_client_options(client_class, transport_class, transport_name): + # Check that 
if channel is provided we won't create a new one. + with mock.patch.object(RegionsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RegionsClient, transports.RegionsRestTransport, "rest", "true"), + (RegionsClient, transports.RegionsRestTransport, "rest", "false"), +]) +@mock.patch.object(RegionsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_regions_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
    # --- Tail of test_regions_client_mtls_env_auto (the `def` and the earlier
    # cases are above this chunk). Case: an explicit client_cert_source is
    # supplied via client options; whether it is actually used depends on the
    # GOOGLE_API_USE_CLIENT_CERTIFICATE env var patched in below.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                # Cert source is ignored and the regular endpoint is used.
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                # Cert source is honored and the mTLS endpoint is selected.
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    # With no cert available, the plain endpoint must be used regardless of env.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )


@pytest.mark.parametrize("client_class", [
    RegionsClient
])
@mock.patch.object(RegionsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RegionsClient))
def test_regions_client_get_mtls_endpoint_and_cert_source(client_class):
    """Exercise get_mtls_endpoint_and_cert_source() across the matrix of
    GOOGLE_API_USE_CLIENT_CERTIFICATE and GOOGLE_API_USE_MTLS_ENDPOINT values."""
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    # The provided cert source must be dropped (returned as None).
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (RegionsClient, transports.RegionsRestTransport, "rest"),
])
def test_regions_client_client_options_scopes(client_class, transport_class, transport_name):
    """Verify that scopes passed via client options reach the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        # The scopes must be forwarded verbatim to the transport constructor.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (RegionsClient, transports.RegionsRestTransport, "rest", None),
])
def test_regions_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """Verify that a credentials file set via client options is forwarded to the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )


@pytest.mark.parametrize("request_type", [
    compute.GetRegionRequest,
    dict,
])
def test_get_rest(request_type):
    """Happy-path test for RegionsClient.get over REST: a mocked HTTP session
    returns a serialized Region, and the deserialized response is verified
    field by field."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Region(
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            id=205,
            kind='kind_value',
            name='name_value',
            self_link='self_link_value',
            status='status_value',
            supports_pzs=True,
            zones=['zones_value'],
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Region.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Region)
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.id == 205
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.self_link == 'self_link_value'
    assert response.status == 'status_value'
    assert response.supports_pzs is True
    assert response.zones == ['zones_value']


def test_get_rest_required_fields(request_type=compute.GetRegionRequest):
    """Verify required-field handling for get: defaults are dropped, explicit
    values survive, and the transcoded request carries no extra query params."""
    transport_class = transports.RegionsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Region()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Region.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get(request)

            # No query parameters are expected beyond the transcoded request.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_rest_unset_required_fields():
    """get has no optional query params; required fields are project/region."""
    transport = transports.RegionsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rest_interceptors(null_interceptor):
    """Verify pre_get/post_get interceptor hooks fire exactly once around get()."""
    transport = transports.RegionsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionsRestInterceptor(),
        )
    client = RegionsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionsRestInterceptor, "post_get") as post, \
         mock.patch.object(transports.RegionsRestInterceptor, "pre_get") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.GetRegionRequest.pb(compute.GetRegionRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Region.to_json(compute.Region())

        request = compute.GetRegionRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Region()

        client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRegionRequest):
    """A 400 HTTP response must surface as core_exceptions.BadRequest."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get(request)


def test_get_rest_flattened():
    """Verify the flattened (keyword-argument) form of get() builds the right URL."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Region()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Region.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}" % client.transport._host, args[1])


def test_get_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get(
            compute.GetRegionRequest(),
            project='project_value',
            region='region_value',
        )


def test_get_rest_error():
    # Smoke test: client construction with the rest transport succeeds.
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.ListRegionsRequest,
    dict,
])
def test_list_rest(request_type):
    """Happy-path test for RegionsClient.list over REST: a mocked HTTP session
    returns a serialized RegionList and the pager response is verified."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.RegionList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.RegionList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'


def test_list_rest_required_fields(request_type=compute.ListRegionsRequest):
    """Verify required-field handling for list: only `project` is required and
    the optional paging/filter params are allowed as query parameters."""
    transport_class = transports.RegionsRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.RegionList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.RegionList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list(request)

            # No query parameters are expected beyond the transcoded request.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_rest_unset_required_fields():
    """list allows camelCase optional query params; only `project` is required."""
    transport = transports.RegionsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
    """Verify pre_list/post_list interceptor hooks fire exactly once around list()."""
    transport = transports.RegionsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RegionsRestInterceptor(),
        )
    client = RegionsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RegionsRestInterceptor, "post_list") as post, \
         mock.patch.object(transports.RegionsRestInterceptor, "pre_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListRegionsRequest.pb(compute.ListRegionsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.RegionList.to_json(compute.RegionList())

        request = compute.ListRegionsRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.RegionList()

        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRegionsRequest):
    """A 400 HTTP response must surface as core_exceptions.BadRequest."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list(request)


def test_list_rest_flattened():
    """Verify the flattened (keyword-argument) form of list() builds the right URL."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.RegionList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.RegionList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields must raise ValueError."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListRegionsRequest(),
            project='project_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    """Verify the list() pager walks all pages and exposes raw page tokens."""
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.RegionList(
                items=[
                    compute.Region(),
                    compute.Region(),
                    compute.Region(),
                ],
                next_page_token='abc',
            ),
            compute.RegionList(
                items=[],
                next_page_token='def',
            ),
            compute.RegionList(
                items=[
                    compute.Region(),
                ],
                next_page_token='ghi',
            ),
            compute.RegionList(
                items=[
                    compute.Region(),
                    compute.Region(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.RegionList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1'}

        pager = client.list(request=sample_request)

        # 3 + 0 + 1 + 2 items across the four pages above.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.Region)
                   for i in results)

        pages = list(client.list(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


def test_credentials_transport_error():
    """Mutually exclusive client constructor arguments must raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.RegionsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.RegionsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionsClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.RegionsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RegionsClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RegionsClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.RegionsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RegionsClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    # A client may be instantiated with a custom transport instance.
    transport = transports.RegionsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = RegionsClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.RegionsRestTransport,
])
def test_transport_adc(transport_class):
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    # The transport's `kind` property must echo its registered name.
    transport = RegionsClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_regions_base_transport_error():
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.RegionsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_regions_base_transport():
    """The abstract base transport must raise NotImplementedError everywhere."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.compute_v1.services.regions.transports.RegionsTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.RegionsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'get',
        'list',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_regions_base_transport_with_credentials_file():
    """Credentials files given to the base transport must be loaded with the compute scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.regions.transports.RegionsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RegionsTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute.readonly',
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id="octopus",
        )


def test_regions_base_transport_with_adc():
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.regions.transports.RegionsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RegionsTransport()
        adc.assert_called_once()


def test_regions_auth_adc():
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        RegionsClient()
        # ADC must be requested with the compute default scopes.
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute.readonly',
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id=None,
        )


def test_regions_http_transport_client_cert_source_for_mtls():
    # The supplied client cert callback must be wired into the HTTP session.
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.RegionsRestTransport (
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_regions_host_no_port(transport_name):
    # REST hosts are scheme-prefixed; gRPC hosts carry the default :443 port.
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_regions_host_with_port(transport_name):
    # An explicit port in the endpoint must be preserved.
    client = RegionsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com:8000'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_regions_client_transport_session_collision(transport_name):
    # Two independent clients must never share an HTTP session.
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = RegionsClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = RegionsClient(
        credentials=creds2,
        transport=transport_name,
    )
    session1 = client1.transport.get._session
    session2 = client2.transport.get._session
    assert session1 != session2
    session1 = client1.transport.list._session
    session2 = client2.transport.list._session
    assert session1 != session2

def test_common_billing_account_path():
    # Round-trip construction of the common billing-account resource path.
    billing_account = "squid"
    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
    actual = RegionsClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    expected = {
        "billing_account": "clam",
    }
    path = RegionsClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionsClient.parse_common_billing_account_path(path)
    assert expected == actual

def test_common_folder_path():
    folder = "whelk"
    expected = "folders/{folder}".format(folder=folder, )
    actual = RegionsClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    expected = {
        "folder": "octopus",
    }
    path = RegionsClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionsClient.parse_common_folder_path(path)
    assert expected == actual

def test_common_organization_path():
    organization = "oyster"
    expected = "organizations/{organization}".format(organization=organization, )
    actual = RegionsClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    expected = {
        "organization": "nudibranch",
    }
    path = RegionsClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionsClient.parse_common_organization_path(path)
    assert expected == actual

def test_common_project_path():
    project = "cuttlefish"
    expected = "projects/{project}".format(project=project, )
    actual = RegionsClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    expected = {
        "project": "mussel",
    }
    path = RegionsClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionsClient.parse_common_project_path(path)
    assert expected == actual

def test_common_location_path():
    project = "winkle"
    location = "nautilus"
    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
    actual = RegionsClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = RegionsClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = RegionsClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    # A custom client_info must be passed through to _prep_wrapped_messages
    # whether the transport is built implicitly or explicitly.
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.RegionsTransport, '_prep_wrapped_messages') as prep:
        client = RegionsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.RegionsTransport, '_prep_wrapped_messages') as prep:
        transport_class = RegionsClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    # Closing the client must close the transport's underlying session exactly once.
    transports = {
        "rest": "_session",
    }

    for transport, close_name in transports.items():
        client = RegionsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()

def test_client_ctx():
    # NOTE(review): body continues past this chunk.
    transports = [
        'rest',
    ]
    for transport in transports:
        client = RegionsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RegionsClient, transports.RegionsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_reservations.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_reservations.py new file mode 100644 index 000000000..0f7dfb27a --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_reservations.py @@ -0,0 +1,4726 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
# NOTE(review): json_format is imported twice in the generated source; kept
# as-is since the second import is a harmless no-op.
from google.protobuf import json_format

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.reservations import ReservationsClient
from google.cloud.compute_v1.services.reservations import pagers
from google.cloud.compute_v1.services.reservations import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    """Stand-in mTLS client cert source: returns dummy (cert, key) bytes."""
    cert_bytes, key_bytes = b"cert bytes", b"key bytes"
    return cert_bytes, key_bytes


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ReservationsClient._get_default_mtls_endpoint(None) is None + assert ReservationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ReservationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ReservationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ReservationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ReservationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ReservationsClient, "rest"), +]) +def test_reservations_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ReservationsRestTransport, "rest"), +]) +def test_reservations_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', 
create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ReservationsClient, "rest"), +]) +def test_reservations_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_reservations_client_get_transport_class(): + transport = ReservationsClient.get_transport_class() + available_transports = [ + transports.ReservationsRestTransport, + ] + assert transport in available_transports + + transport = ReservationsClient.get_transport_class("rest") + assert transport == transports.ReservationsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ReservationsClient, transports.ReservationsRestTransport, "rest"), +]) +@mock.patch.object(ReservationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReservationsClient)) +def 
test_reservations_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ReservationsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ReservationsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ReservationsClient, transports.ReservationsRestTransport, "rest", "true"), + (ReservationsClient, transports.ReservationsRestTransport, "rest", "false"), +]) +@mock.patch.object(ReservationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReservationsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_reservations_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ReservationsClient +]) +@mock.patch.object(ReservationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReservationsClient)) +def test_reservations_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ReservationsClient, transports.ReservationsRestTransport, "rest"), +]) +def test_reservations_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ReservationsClient, transports.ReservationsRestTransport, "rest", None), +]) +def test_reservations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListReservationsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ReservationAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ReservationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListReservationsRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ReservationAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ReservationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListReservationsRequest.pb(compute.AggregatedListReservationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ReservationAggregatedList.to_json(compute.ReservationAggregatedList()) + + request = compute.AggregatedListReservationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ReservationAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListReservationsRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ReservationAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ReservationAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/reservations" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListReservationsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ReservationAggregatedList( + items={ + 'a':compute.ReservationsScopedList(), + 'b':compute.ReservationsScopedList(), + 'c':compute.ReservationsScopedList(), + }, + next_page_token='abc', + ), + compute.ReservationAggregatedList( + items={}, + next_page_token='def', + ), + compute.ReservationAggregatedList( + items={ + 'g':compute.ReservationsScopedList(), + }, + next_page_token='ghi', + ), + compute.ReservationAggregatedList( + items={ + 'h':compute.ReservationsScopedList(), + 'i':compute.ReservationsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ReservationAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.ReservationsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.ReservationsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.ReservationsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteReservationRequest, + dict, +]) +def test_delete_rest(request_type): + client = 
ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["reservation"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["reservation"] = 'reservation_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "reservation" in jsonified_request + assert jsonified_request["reservation"] == 'reservation_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "reservation", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteReservationRequest.pb(compute.DeleteReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation='reservation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteReservationRequest(), + project='project_value', + zone='zone_value', + reservation='reservation_value', + ) + + +def test_delete_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteReservationRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["reservation"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["reservation"] = 'reservation_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "reservation" in jsonified_request + assert jsonified_request["reservation"] == 'reservation_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "reservation", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteReservationRequest.pb(compute.DeleteReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation='reservation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteReservationRequest(), + project='project_value', + zone='zone_value', + reservation='reservation_value', + ) + + +def test_delete_unary_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetReservationRequest, + dict, +]) +def test_get_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Reservation( + commitment='commitment_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + satisfies_pzs=True, + self_link='self_link_value', + specific_reservation_required=True, + status='status_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Reservation) + assert response.commitment == 'commitment_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.specific_reservation_required is True + assert response.status == 'status_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["reservation"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["reservation"] = 'reservation_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "reservation" in jsonified_request + assert jsonified_request["reservation"] == 'reservation_value' + assert "zone" in jsonified_request + assert 
jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Reservation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "reservation", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetReservationRequest.pb(compute.GetReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Reservation.to_json(compute.Reservation()) + + request = compute.GetReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Reservation() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Reservation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation='reservation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetReservationRequest(), + project='project_value', + zone='zone_value', + reservation='reservation_value', + ) + + +def test_get_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyReservationRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyReservationRequest.pb(compute.GetIamPolicyReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyReservationRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertReservationRequest, + dict, +]) +def test_insert_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["reservation_resource"] = {'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] 
= 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "reservationResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertReservationRequest.pb(compute.InsertReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["reservation_resource"] = {'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation_resource=compute.Reservation(commitment='commitment_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertReservationRequest(), + project='project_value', + zone='zone_value', + reservation_resource=compute.Reservation(commitment='commitment_value'), + ) + + +def test_insert_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertReservationRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["reservation_resource"] = {'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "reservationResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, 
"post_insert") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertReservationRequest.pb(compute.InsertReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["reservation_resource"] = {'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 
'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation_resource=compute.Reservation(commitment='commitment_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertReservationRequest(), + project='project_value', + zone='zone_value', + reservation_resource=compute.Reservation(commitment='commitment_value'), + ) + + +def test_insert_unary_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListReservationsRequest, + dict, +]) +def test_list_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ReservationList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ReservationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListReservationsRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ReservationList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ReservationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListReservationsRequest.pb(compute.ListReservationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.ReservationList.to_json(compute.ReservationList()) + + request = compute.ListReservationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ReservationList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListReservationsRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ReservationList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ReservationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListReservationsRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ReservationList( + items=[ + compute.Reservation(), + compute.Reservation(), + compute.Reservation(), + ], + next_page_token='abc', + ), + compute.ReservationList( + items=[], + next_page_token='def', + ), + compute.ReservationList( + items=[ + compute.Reservation(), + ], + next_page_token='ghi', + ), + compute.ReservationList( + items=[ + compute.Reservation(), + compute.Reservation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ReservationList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Reservation) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeReservationRequest, + dict, +]) +def test_resize_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request_init["reservations_resize_request_resource"] = {'specific_sku_count': 1920} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_resize_rest_required_fields(request_type=compute.ResizeReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["reservation"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["reservation"] = 'reservation_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "reservation" in jsonified_request + assert jsonified_request["reservation"] == 'reservation_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "reservation", "reservationsResizeRequestResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_resize") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeReservationRequest.pb(compute.ResizeReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request_init["reservations_resize_request_resource"] = {'specific_sku_count': 1920} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + +def test_resize_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation='reservation_value', + reservations_resize_request_resource=compute.ReservationsResizeRequest(specific_sku_count=1920), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}/resize" % client.transport._host, args[1]) + + +def test_resize_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize( + compute.ResizeReservationRequest(), + project='project_value', + zone='zone_value', + reservation='reservation_value', + reservations_resize_request_resource=compute.ReservationsResizeRequest(specific_sku_count=1920), + ) + + +def test_resize_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ResizeReservationRequest, + dict, +]) +def test_resize_unary_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request_init["reservations_resize_request_resource"] = {'specific_sku_count': 1920} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.resize_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_resize_unary_rest_required_fields(request_type=compute.ResizeReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["reservation"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["reservation"] = 'reservation_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "reservation" in jsonified_request + assert jsonified_request["reservation"] == 'reservation_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.resize_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_resize_unary_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.resize._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "reservation", "reservationsResizeRequestResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_unary_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ReservationsRestInterceptor, "post_resize") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_resize") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ResizeReservationRequest.pb(compute.ResizeReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ResizeReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.resize_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ResizeReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request_init["reservations_resize_request_resource"] = {'specific_sku_count': 1920} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize_unary(request) + + +def test_resize_unary_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation='reservation_value', + reservations_resize_request_resource=compute.ReservationsResizeRequest(specific_sku_count=1920), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.resize_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}/resize" % client.transport._host, args[1]) + + +def test_resize_unary_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resize_unary( + compute.ResizeReservationRequest(), + project='project_value', + zone='zone_value', + reservation='reservation_value', + reservations_resize_request_resource=compute.ReservationsResizeRequest(specific_sku_count=1920), + ) + + +def test_resize_unary_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyReservationRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 
'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "zone", "zoneSetPolicyRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ReservationsRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyReservationRequest.pb(compute.SetIamPolicyReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["zone_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 
'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyReservationRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsReservationRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ 
+ mock.patch.object(transports.ReservationsRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsReservationRequest.pb(compute.TestIamPermissionsReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsReservationRequest(), + project='project_value', + zone='zone_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateReservationRequest, + dict, +]) +def test_update_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request_init["reservation_resource"] = {'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 
'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["reservation"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["reservation"] = 'reservation_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "reservation" in jsonified_request + assert jsonified_request["reservation"] == 'reservation_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("project", "reservation", "reservationResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateReservationRequest.pb(compute.UpdateReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value 
= Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request_init["reservation_resource"] = {'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation='reservation_value', + reservation_resource=compute.Reservation(commitment='commitment_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update( + compute.UpdateReservationRequest(), + project='project_value', + zone='zone_value', + reservation='reservation_value', + reservation_resource=compute.Reservation(commitment='commitment_value'), + ) + + +def test_update_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateReservationRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request_init["reservation_resource"] = {'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 
'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateReservationRequest): + transport_class = transports.ReservationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["reservation"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["reservation"] = 'reservation_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("paths", "request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "reservation" in jsonified_request + assert jsonified_request["reservation"] == 'reservation_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + # NOTE(review): instantiate AnonymousCredentials; the class object itself was being passed as the credentials argument, which only worked because this test never exercises the credentials. + transport = transports.ReservationsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("paths", "requestId", "updateMask", )) & set(("project", "reservation", "reservationResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ReservationsRestInterceptor(), + ) + client = ReservationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ReservationsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.ReservationsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateReservationRequest.pb(compute.UpdateReservationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateReservationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateReservationRequest): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + request_init["reservation_resource"] = {'commitment': 'commitment_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'resource_policies': {}, 'resource_status': {'specific_sku_allocation': {'source_instance_template_id': 'source_instance_template_id_value'}}, 'satisfies_pzs': True, 'self_link': 'self_link_value', 'share_settings': {'project_map': {}, 'share_type': 'share_type_value'}, 'specific_reservation': {'assured_count': 1407, 'count': 553, 'in_use_count': 1291, 
'instance_properties': {'guest_accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value'}], 'local_ssds': [{'disk_size_gb': 1261, 'interface': 'interface_value'}], 'location_hint': 'location_hint_value', 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value'}, 'source_instance_template': 'source_instance_template_value'}, 'specific_reservation_required': True, 'status': 'status_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'reservation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + reservation='reservation_value', + reservation_resource=compute.Reservation(commitment='commitment_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_unary( + compute.UpdateReservationRequest(), + project='project_value', + zone='zone_value', + reservation='reservation_value', + reservation_resource=compute.Reservation(commitment='commitment_value'), + ) + + +def test_update_unary_rest_error(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ReservationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ReservationsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ReservationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ReservationsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ReservationsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = ReservationsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_reservations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ReservationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_reservations_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.reservations.transports.ReservationsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ReservationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'resize', + 'set_iam_policy', + 'test_iam_permissions', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_reservations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.reservations.transports.ReservationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ReservationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_reservations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.reservations.transports.ReservationsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ReservationsTransport() + adc.assert_called_once() + + +def test_reservations_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ReservationsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_reservations_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ReservationsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_reservations_host_no_port(transport_name): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_reservations_host_with_port(transport_name): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_reservations_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = ReservationsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ReservationsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.resize._session + session2 = client2.transport.resize._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ReservationsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ReservationsClient.common_billing_account_path(**expected) + + # Check that the path construction is 
reversible. + actual = ReservationsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + # Expanding a folder name should follow the "folders/{folder}" template. + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ReservationsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ReservationsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + # Expanding an organization name should follow the "organizations/{organization}" template. + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ReservationsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ReservationsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + # Expanding a project id should follow the "projects/{project}" template. + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ReservationsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ReservationsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ReservationsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + # Expanding project + location should follow the "projects/{project}/locations/{location}" template. + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ReservationsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ReservationsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + # The client_info passed to the client (or transport) must reach _prep_wrapped_messages. + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ReservationsTransport, '_prep_wrapped_messages') as prep: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ReservationsTransport, '_prep_wrapped_messages') as prep: + transport_class = ReservationsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + # NOTE(review): this local name shadows the imported `transports` module for the rest of this test. + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + # NOTE(review): the local list below also shadows the imported `transports` module. + transports = [ + 'rest', + ] + for transport in transports: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ReservationsClient, transports.ReservationsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_resource_policies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_resource_policies.py new file mode 100644 index 000000000..56cd30814 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_resource_policies.py @@ -0,0 +1,4156 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.resource_policies import ResourcePoliciesClient +from google.cloud.compute_v1.services.resource_policies import pagers +from google.cloud.compute_v1.services.resource_policies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ResourcePoliciesClient._get_default_mtls_endpoint(None) is None + assert ResourcePoliciesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ResourcePoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ResourcePoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ResourcePoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ResourcePoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ResourcePoliciesClient, "rest"), +]) +def test_resource_policies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ResourcePoliciesRestTransport, "rest"), +]) +def test_resource_policies_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ResourcePoliciesClient, "rest"), +]) +def test_resource_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_resource_policies_client_get_transport_class(): + transport = ResourcePoliciesClient.get_transport_class() + available_transports = [ + transports.ResourcePoliciesRestTransport, + ] + assert transport in available_transports + + transport = ResourcePoliciesClient.get_transport_class("rest") + assert transport == transports.ResourcePoliciesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ResourcePoliciesClient, transports.ResourcePoliciesRestTransport, "rest"), +]) 
+@mock.patch.object(ResourcePoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ResourcePoliciesClient)) +def test_resource_policies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ResourcePoliciesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ResourcePoliciesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ResourcePoliciesClient, transports.ResourcePoliciesRestTransport, "rest", "true"), + (ResourcePoliciesClient, transports.ResourcePoliciesRestTransport, "rest", "false"), +]) +@mock.patch.object(ResourcePoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ResourcePoliciesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_resource_policies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ResourcePoliciesClient +]) +@mock.patch.object(ResourcePoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ResourcePoliciesClient)) +def test_resource_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ResourcePoliciesClient, transports.ResourcePoliciesRestTransport, "rest"), +]) +def test_resource_policies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ResourcePoliciesClient, transports.ResourcePoliciesRestTransport, "rest", None), +]) +def test_resource_policies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListResourcePoliciesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ResourcePolicyAggregatedList( + etag='etag_value', + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ResourcePolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.etag == 'etag_value' + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListResourcePoliciesRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ResourcePolicyAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ResourcePolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListResourcePoliciesRequest.pb(compute.AggregatedListResourcePoliciesRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ResourcePolicyAggregatedList.to_json(compute.ResourcePolicyAggregatedList()) + + request = compute.AggregatedListResourcePoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ResourcePolicyAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListResourcePoliciesRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ResourcePolicyAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ResourcePolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/resourcePolicies" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListResourcePoliciesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ResourcePolicyAggregatedList( + items={ + 'a':compute.ResourcePoliciesScopedList(), + 'b':compute.ResourcePoliciesScopedList(), + 'c':compute.ResourcePoliciesScopedList(), + }, + next_page_token='abc', + ), + compute.ResourcePolicyAggregatedList( + items={}, + next_page_token='def', + ), + compute.ResourcePolicyAggregatedList( + items={ + 'g':compute.ResourcePoliciesScopedList(), + }, + next_page_token='ghi', + ), + compute.ResourcePolicyAggregatedList( + items={ + 'h':compute.ResourcePoliciesScopedList(), + 'i':compute.ResourcePoliciesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ResourcePolicyAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.ResourcePoliciesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.ResourcePoliciesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.ResourcePoliciesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteResourcePolicyRequest, + dict, +]) +def 
test_delete_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resourcePolicy"] = 'resource_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resourcePolicy" in jsonified_request + assert jsonified_request["resourcePolicy"] == 'resource_policy_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "resourcePolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteResourcePolicyRequest.pb(compute.DeleteResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 
+ req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteResourcePolicyRequest(), + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + ) + + +def test_delete_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteResourcePolicyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resourcePolicy"] = 'resource_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resourcePolicy" in jsonified_request + assert jsonified_request["resourcePolicy"] == 'resource_policy_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "resourcePolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteResourcePolicyRequest.pb(compute.DeleteResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteResourcePolicyRequest(), + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + ) + + +def test_delete_unary_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetResourcePolicyRequest, + dict, +]) +def test_get_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ResourcePolicy( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + status='status_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ResourcePolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.ResourcePolicy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + + +def test_get_rest_required_fields(request_type=compute.GetResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resourcePolicy"] = 'resource_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resourcePolicy" in jsonified_request + assert jsonified_request["resourcePolicy"] == 'resource_policy_value' + + client = ResourcePoliciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ResourcePolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ResourcePolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resourcePolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = 
ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetResourcePolicyRequest.pb(compute.GetResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ResourcePolicy.to_json(compute.ResourcePolicy()) + + request = compute.GetResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ResourcePolicy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ResourcePolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ResourcePolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetResourcePolicyRequest(), + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + ) + + +def test_get_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyResourcePolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "region", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyResourcePolicyRequest.pb(compute.GetIamPolicyResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyResourcePolicyRequest(), + project='project_value', + region='region_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertResourcePolicyRequest, + dict, +]) +def test_insert_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["resource_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_consistency_group_policy': {}, 'group_placement_policy': {'availability_domain_count': 2650, 'collocation': 'collocation_value', 'vm_count': 875}, 'id': 205, 'instance_schedule_policy': {'expiration_time': 'expiration_time_value', 'start_time': 'start_time_value', 'time_zone': 'time_zone_value', 'vm_start_schedule': {'schedule': 'schedule_value'}, 'vm_stop_schedule': {}}, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'resource_status': {'instance_schedule_policy': {'last_run_start_time': 'last_run_start_time_value', 'next_run_start_time': 'next_run_start_time_value'}}, 'self_link': 'self_link_value', 'snapshot_schedule_policy': {'retention_policy': {'max_retention_days': 1933, 'on_source_disk_delete': 'on_source_disk_delete_value'}, 'schedule': {'daily_schedule': {'days_in_cycle': 1366, 'duration': 'duration_value', 'start_time': 'start_time_value'}, 'hourly_schedule': {'duration': 'duration_value', 'hours_in_cycle': 1494, 'start_time': 'start_time_value'}, 'weekly_schedule': {'day_of_weeks': [{'day': 'day_value', 'duration': 'duration_value', 'start_time': 'start_time_value'}]}}, 'snapshot_properties': 
{'chain_name': 'chain_name_value', 'guest_flush': True, 'labels': {}, 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}}, 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "resourcePolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertResourcePolicyRequest.pb(compute.InsertResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["resource_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_consistency_group_policy': {}, 'group_placement_policy': {'availability_domain_count': 2650, 'collocation': 'collocation_value', 'vm_count': 875}, 'id': 205, 'instance_schedule_policy': {'expiration_time': 'expiration_time_value', 'start_time': 'start_time_value', 'time_zone': 'time_zone_value', 'vm_start_schedule': {'schedule': 'schedule_value'}, 'vm_stop_schedule': {}}, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'resource_status': {'instance_schedule_policy': {'last_run_start_time': 'last_run_start_time_value', 'next_run_start_time': 'next_run_start_time_value'}}, 'self_link': 'self_link_value', 'snapshot_schedule_policy': {'retention_policy': {'max_retention_days': 1933, 'on_source_disk_delete': 'on_source_disk_delete_value'}, 'schedule': {'daily_schedule': {'days_in_cycle': 1366, 'duration': 'duration_value', 'start_time': 'start_time_value'}, 'hourly_schedule': {'duration': 'duration_value', 'hours_in_cycle': 1494, 'start_time': 'start_time_value'}, 'weekly_schedule': 
{'day_of_weeks': [{'day': 'day_value', 'duration': 'duration_value', 'start_time': 'start_time_value'}]}}, 'snapshot_properties': {'chain_name': 'chain_name_value', 'guest_flush': True, 'labels': {}, 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}}, 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource_policy_resource=compute.ResourcePolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertResourcePolicyRequest(), + project='project_value', + region='region_value', + resource_policy_resource=compute.ResourcePolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertResourcePolicyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["resource_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_consistency_group_policy': {}, 'group_placement_policy': {'availability_domain_count': 2650, 'collocation': 'collocation_value', 'vm_count': 875}, 'id': 205, 'instance_schedule_policy': {'expiration_time': 'expiration_time_value', 'start_time': 'start_time_value', 'time_zone': 'time_zone_value', 'vm_start_schedule': {'schedule': 'schedule_value'}, 'vm_stop_schedule': {}}, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'resource_status': {'instance_schedule_policy': {'last_run_start_time': 'last_run_start_time_value', 'next_run_start_time': 'next_run_start_time_value'}}, 'self_link': 
'self_link_value', 'snapshot_schedule_policy': {'retention_policy': {'max_retention_days': 1933, 'on_source_disk_delete': 'on_source_disk_delete_value'}, 'schedule': {'daily_schedule': {'days_in_cycle': 1366, 'duration': 'duration_value', 'start_time': 'start_time_value'}, 'hourly_schedule': {'duration': 'duration_value', 'hours_in_cycle': 1494, 'start_time': 'start_time_value'}, 'weekly_schedule': {'day_of_weeks': [{'day': 'day_value', 'duration': 'duration_value', 'start_time': 'start_time_value'}]}}, 'snapshot_properties': {'chain_name': 'chain_name_value', 'guest_flush': True, 'labels': {}, 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}}, 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') 
+ req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "resourcePolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertResourcePolicyRequest.pb(compute.InsertResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["resource_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_consistency_group_policy': {}, 'group_placement_policy': {'availability_domain_count': 2650, 'collocation': 'collocation_value', 'vm_count': 875}, 'id': 205, 'instance_schedule_policy': {'expiration_time': 'expiration_time_value', 'start_time': 'start_time_value', 'time_zone': 'time_zone_value', 'vm_start_schedule': {'schedule': 'schedule_value'}, 'vm_stop_schedule': {}}, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'resource_status': {'instance_schedule_policy': {'last_run_start_time': 
'last_run_start_time_value', 'next_run_start_time': 'next_run_start_time_value'}}, 'self_link': 'self_link_value', 'snapshot_schedule_policy': {'retention_policy': {'max_retention_days': 1933, 'on_source_disk_delete': 'on_source_disk_delete_value'}, 'schedule': {'daily_schedule': {'days_in_cycle': 1366, 'duration': 'duration_value', 'start_time': 'start_time_value'}, 'hourly_schedule': {'duration': 'duration_value', 'hours_in_cycle': 1494, 'start_time': 'start_time_value'}, 'weekly_schedule': {'day_of_weeks': [{'day': 'day_value', 'duration': 'duration_value', 'start_time': 'start_time_value'}]}}, 'snapshot_properties': {'chain_name': 'chain_name_value', 'guest_flush': True, 'labels': {}, 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}}, 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource_policy_resource=compute.ResourcePolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertResourcePolicyRequest(), + project='project_value', + region='region_value', + resource_policy_resource=compute.ResourcePolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListResourcePoliciesRequest, + dict, +]) +def test_list_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ResourcePolicyList( + etag='etag_value', + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ResourcePolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.etag == 'etag_value' + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListResourcePoliciesRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.ResourcePolicyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ResourcePolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListResourcePoliciesRequest.pb(compute.ListResourcePoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ResourcePolicyList.to_json(compute.ResourcePolicyList()) + + request = compute.ListResourcePoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ResourcePolicyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListResourcePoliciesRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ResourcePolicyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ResourcePolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListResourcePoliciesRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ResourcePolicyList( + items=[ + compute.ResourcePolicy(), + compute.ResourcePolicy(), + compute.ResourcePolicy(), + ], + next_page_token='abc', + ), + compute.ResourcePolicyList( + items=[], + next_page_token='def', + ), + compute.ResourcePolicyList( + items=[ + compute.ResourcePolicy(), + ], + next_page_token='ghi', + ), + compute.ResourcePolicyList( + items=[ + compute.ResourcePolicy(), + compute.ResourcePolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ResourcePolicyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.ResourcePolicy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchResourcePolicyRequest, 
+ dict, +]) +def test_patch_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request_init["resource_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_consistency_group_policy': {}, 'group_placement_policy': {'availability_domain_count': 2650, 'collocation': 'collocation_value', 'vm_count': 875}, 'id': 205, 'instance_schedule_policy': {'expiration_time': 'expiration_time_value', 'start_time': 'start_time_value', 'time_zone': 'time_zone_value', 'vm_start_schedule': {'schedule': 'schedule_value'}, 'vm_stop_schedule': {}}, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'resource_status': {'instance_schedule_policy': {'last_run_start_time': 'last_run_start_time_value', 'next_run_start_time': 'next_run_start_time_value'}}, 'self_link': 'self_link_value', 'snapshot_schedule_policy': {'retention_policy': {'max_retention_days': 1933, 'on_source_disk_delete': 'on_source_disk_delete_value'}, 'schedule': {'daily_schedule': {'days_in_cycle': 1366, 'duration': 'duration_value', 'start_time': 'start_time_value'}, 'hourly_schedule': {'duration': 'duration_value', 'hours_in_cycle': 1494, 'start_time': 'start_time_value'}, 'weekly_schedule': {'day_of_weeks': [{'day': 'day_value', 'duration': 'duration_value', 'start_time': 'start_time_value'}]}}, 'snapshot_properties': {'chain_name': 'chain_name_value', 'guest_flush': True, 'labels': {}, 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}}, 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] 
= 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resourcePolicy"] = 'resource_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resourcePolicy" in jsonified_request + assert jsonified_request["resourcePolicy"] == 'resource_policy_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("project", "region", "resourcePolicy", "resourcePolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchResourcePolicyRequest.pb(compute.PatchResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } 
+ + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request_init["resource_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_consistency_group_policy': {}, 'group_placement_policy': {'availability_domain_count': 2650, 'collocation': 'collocation_value', 'vm_count': 875}, 'id': 205, 'instance_schedule_policy': {'expiration_time': 'expiration_time_value', 'start_time': 'start_time_value', 'time_zone': 'time_zone_value', 'vm_start_schedule': {'schedule': 'schedule_value'}, 'vm_stop_schedule': {}}, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'resource_status': {'instance_schedule_policy': {'last_run_start_time': 'last_run_start_time_value', 'next_run_start_time': 'next_run_start_time_value'}}, 'self_link': 'self_link_value', 'snapshot_schedule_policy': {'retention_policy': {'max_retention_days': 1933, 'on_source_disk_delete': 'on_source_disk_delete_value'}, 'schedule': {'daily_schedule': {'days_in_cycle': 1366, 'duration': 'duration_value', 'start_time': 'start_time_value'}, 'hourly_schedule': {'duration': 'duration_value', 'hours_in_cycle': 1494, 
'start_time': 'start_time_value'}, 'weekly_schedule': {'day_of_weeks': [{'day': 'day_value', 'duration': 'duration_value', 'start_time': 'start_time_value'}]}}, 'snapshot_properties': {'chain_name': 'chain_name_value', 'guest_flush': True, 'labels': {}, 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}}, 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + resource_policy_resource=compute.ResourcePolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchResourcePolicyRequest(), + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + resource_policy_resource=compute.ResourcePolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchResourcePolicyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request_init["resource_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_consistency_group_policy': {}, 'group_placement_policy': {'availability_domain_count': 2650, 'collocation': 'collocation_value', 'vm_count': 875}, 'id': 205, 'instance_schedule_policy': {'expiration_time': 'expiration_time_value', 'start_time': 'start_time_value', 'time_zone': 'time_zone_value', 'vm_start_schedule': {'schedule': 'schedule_value'}, 'vm_stop_schedule': {}}, 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'resource_status': {'instance_schedule_policy': {'last_run_start_time': 'last_run_start_time_value', 'next_run_start_time': 'next_run_start_time_value'}}, 'self_link': 'self_link_value', 'snapshot_schedule_policy': {'retention_policy': {'max_retention_days': 1933, 'on_source_disk_delete': 'on_source_disk_delete_value'}, 'schedule': {'daily_schedule': {'days_in_cycle': 1366, 'duration': 'duration_value', 'start_time': 'start_time_value'}, 'hourly_schedule': {'duration': 'duration_value', 'hours_in_cycle': 1494, 'start_time': 'start_time_value'}, 'weekly_schedule': {'day_of_weeks': 
[{'day': 'day_value', 'duration': 'duration_value', 'start_time': 'start_time_value'}]}}, 'snapshot_properties': {'chain_name': 'chain_name_value', 'guest_flush': True, 'labels': {}, 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}}, 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resourcePolicy"] = 'resource_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resourcePolicy" in jsonified_request + assert jsonified_request["resourcePolicy"] == 'resource_policy_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("project", "region", "resourcePolicy", "resourcePolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchResourcePolicyRequest.pb(compute.PatchResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + request_init["resource_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_consistency_group_policy': {}, 'group_placement_policy': {'availability_domain_count': 2650, 'collocation': 'collocation_value', 'vm_count': 875}, 'id': 205, 'instance_schedule_policy': {'expiration_time': 'expiration_time_value', 'start_time': 'start_time_value', 'time_zone': 'time_zone_value', 'vm_start_schedule': {'schedule': 'schedule_value'}, 'vm_stop_schedule': {}}, 
'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'resource_status': {'instance_schedule_policy': {'last_run_start_time': 'last_run_start_time_value', 'next_run_start_time': 'next_run_start_time_value'}}, 'self_link': 'self_link_value', 'snapshot_schedule_policy': {'retention_policy': {'max_retention_days': 1933, 'on_source_disk_delete': 'on_source_disk_delete_value'}, 'schedule': {'daily_schedule': {'days_in_cycle': 1366, 'duration': 'duration_value', 'start_time': 'start_time_value'}, 'hourly_schedule': {'duration': 'duration_value', 'hours_in_cycle': 1494, 'start_time': 'start_time_value'}, 'weekly_schedule': {'day_of_weeks': [{'day': 'day_value', 'duration': 'duration_value', 'start_time': 'start_time_value'}]}}, 'snapshot_properties': {'chain_name': 'chain_name_value', 'guest_flush': True, 'labels': {}, 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']}}, 'status': 'status_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource_policy': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + resource_policy_resource=compute.ResourcePolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchResourcePolicyRequest(), + project='project_value', + region='region_value', + resource_policy='resource_policy_value', + resource_policy_resource=compute.ResourcePolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicyResourcePolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 
'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "regionSetPolicyRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyResourcePolicyRequest.pb(compute.SetIamPolicyResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 
'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyResourcePolicyRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsResourcePolicyRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsResourcePolicyRequest): + transport_class = transports.ResourcePoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.ResourcePoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ResourcePoliciesRestInterceptor(), + ) + client = ResourcePoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.ResourcePoliciesRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsResourcePolicyRequest.pb(compute.TestIamPermissionsResourcePolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsResourcePolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsResourcePolicyRequest): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsResourcePolicyRequest(), + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ResourcePoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ResourcePoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ResourcePoliciesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ResourcePoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ResourcePoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ResourcePoliciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ResourcePoliciesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+    # ADC path: google.auth.default() must be consulted exactly once when no
+    # explicit credentials are supplied to the transport constructor.
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_transport_kind(transport_name):
+    # The transport's `kind` property mirrors the name it was registered
+    # under in the client's transport registry.
+    transport = ResourcePoliciesClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_resource_policies_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.ResourcePoliciesTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_resource_policies_base_transport():
+    # Instantiate the base transport.
+    # __init__ is patched out so the abstract base can be constructed directly.
+    with mock.patch('google.cloud.compute_v1.services.resource_policies.transports.ResourcePoliciesTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.ResourcePoliciesTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'patch', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_resource_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.resource_policies.transports.ResourcePoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ResourcePoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_resource_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.resource_policies.transports.ResourcePoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ResourcePoliciesTransport() + adc.assert_called_once() + + +def test_resource_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ResourcePoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_resource_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ResourcePoliciesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_resource_policies_host_no_port(transport_name): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_resource_policies_host_with_port(transport_name): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_resource_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() 
+ creds2 = ga_credentials.AnonymousCredentials() + client1 = ResourcePoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ResourcePoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ResourcePoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ResourcePoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+    actual = ResourcePoliciesClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    # common_folder_path() renders the canonical "folders/{folder}" template.
+    folder = "whelk"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = ResourcePoliciesClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    # parse_common_folder_path() is the inverse of common_folder_path().
+    expected = {
+        "folder": "octopus",
+    }
+    path = ResourcePoliciesClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = ResourcePoliciesClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    # common_organization_path() renders "organizations/{organization}".
+    organization = "oyster"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = ResourcePoliciesClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    # parse_common_organization_path() is the inverse of common_organization_path().
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = ResourcePoliciesClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = ResourcePoliciesClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    # common_project_path() renders "projects/{project}".
+    project = "cuttlefish"
+    expected = "projects/{project}".format(project=project, )
+    actual = ResourcePoliciesClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    # parse_common_project_path() is the inverse of common_project_path().
+    expected = {
+        "project": "mussel",
+    }
+    path = ResourcePoliciesClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = ResourcePoliciesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ResourcePoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ResourcePoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ResourcePoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ResourcePoliciesTransport, '_prep_wrapped_messages') as prep: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ResourcePoliciesTransport, '_prep_wrapped_messages') as prep: + transport_class = ResourcePoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # 
Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ResourcePoliciesClient, transports.ResourcePoliciesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_routers.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_routers.py new file mode 100644 index 000000000..9c1369748 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_routers.py @@ -0,0 +1,4768 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.routers import RoutersClient +from google.cloud.compute_v1.services.routers import pagers +from google.cloud.compute_v1.services.routers import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RoutersClient._get_default_mtls_endpoint(None) is None + assert RoutersClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RoutersClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RoutersClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RoutersClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RoutersClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RoutersClient, "rest"), +]) +def test_routers_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RoutersRestTransport, "rest"), +]) +def test_routers_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RoutersClient, "rest"), +]) +def test_routers_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_routers_client_get_transport_class(): + transport = RoutersClient.get_transport_class() + available_transports = [ + transports.RoutersRestTransport, + ] + assert transport in available_transports + + transport = RoutersClient.get_transport_class("rest") + assert transport == transports.RoutersRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RoutersClient, transports.RoutersRestTransport, "rest"), +]) +@mock.patch.object(RoutersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RoutersClient)) +def test_routers_client_client_options(client_class, transport_class, transport_name): + # Check that 
if channel is provided we won't create a new one. + with mock.patch.object(RoutersClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RoutersClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RoutersClient, transports.RoutersRestTransport, "rest", "true"), + (RoutersClient, transports.RoutersRestTransport, "rest", "false"), +]) +@mock.patch.object(RoutersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RoutersClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_routers_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RoutersClient +]) +@mock.patch.object(RoutersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RoutersClient)) +def test_routers_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RoutersClient, transports.RoutersRestTransport, "rest"), +]) +def test_routers_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RoutersClient, transports.RoutersRestTransport, "rest", None), +]) +def test_routers_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListRoutersRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RouterAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RouterAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListRoutersRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RouterAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RouterAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutersRestInterceptor(), + ) + client = RoutersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutersRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.RoutersRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListRoutersRequest.pb(compute.AggregatedListRoutersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RouterAggregatedList.to_json(compute.RouterAggregatedList()) + + request = compute.AggregatedListRoutersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RouterAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListRoutersRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.RouterAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RouterAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/routers" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListRoutersRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RouterAggregatedList( + items={ + 'a':compute.RoutersScopedList(), + 'b':compute.RoutersScopedList(), + 'c':compute.RoutersScopedList(), + }, + next_page_token='abc', + ), + compute.RouterAggregatedList( + items={}, + next_page_token='def', + ), + compute.RouterAggregatedList( + items={ + 'g':compute.RoutersScopedList(), + }, + next_page_token='ghi', + ), + compute.RouterAggregatedList( + items={ + 'h':compute.RoutersScopedList(), + 'i':compute.RoutersScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RouterAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.RoutersScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.RoutersScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.RoutersScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRouterRequest, + dict, +]) +def test_delete_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) 
+ + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRouterRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["router"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + jsonified_request["router"] = 'router_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "router" in jsonified_request + assert jsonified_request["router"] == 'router_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "router", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutersRestInterceptor(), + ) + client = RoutersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RoutersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRouterRequest.pb(compute.DeleteRouterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRouterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRouterRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + router='router_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRouterRequest(), + project='project_value', + region='region_value', + router='router_value', + ) + + +def test_delete_rest_error(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRouterRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + 
response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRouterRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["router"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["router"] = 'router_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "router" in jsonified_request + assert jsonified_request["router"] == 'router_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "router", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutersRestInterceptor(), + ) + client = RoutersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RoutersRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RoutersRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRouterRequest.pb(compute.DeleteRouterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRouterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRouterRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + router='router_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRouterRequest(), + project='project_value', + region='region_value', + router='router_value', + ) + + +def test_delete_unary_rest_error(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRouterRequest, + dict, +]) +def test_get_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Router( + creation_timestamp='creation_timestamp_value', + description='description_value', + encrypted_interconnect_router=True, + id=205, + kind='kind_value', + name='name_value', + network='network_value', + region='region_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Router.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Router) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.encrypted_interconnect_router is True + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetRouterRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["router"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["router"] = 'router_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "router" in jsonified_request + assert jsonified_request["router"] == 'router_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Router() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Router.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "router", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutersRestInterceptor(), + ) + client = RoutersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutersRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RoutersRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRouterRequest.pb(compute.GetRouterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Router.to_json(compute.Router()) + + request = compute.GetRouterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Router() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRouterRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Router() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + router='router_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Router.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRouterRequest(), + project='project_value', + region='region_value', + router='router_value', + ) + + +def test_get_rest_error(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetNatMappingInfoRoutersRequest, + dict, +]) +def test_get_nat_mapping_info_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.VmEndpointNatMappingsList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VmEndpointNatMappingsList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_nat_mapping_info(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.GetNatMappingInfoPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_get_nat_mapping_info_rest_required_fields(request_type=compute.GetNatMappingInfoRoutersRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["router"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_nat_mapping_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["router"] = 'router_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_nat_mapping_info._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "nat_name", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "router" in jsonified_request + assert jsonified_request["router"] == 'router_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.VmEndpointNatMappingsList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.VmEndpointNatMappingsList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_nat_mapping_info(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_nat_mapping_info_rest_unset_required_fields(): + transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_nat_mapping_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "natName", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", "router", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_nat_mapping_info_rest_interceptors(null_interceptor): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutersRestInterceptor(), + ) + client = RoutersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutersRestInterceptor, "post_get_nat_mapping_info") as post, \ + mock.patch.object(transports.RoutersRestInterceptor, "pre_get_nat_mapping_info") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetNatMappingInfoRoutersRequest.pb(compute.GetNatMappingInfoRoutersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.VmEndpointNatMappingsList.to_json(compute.VmEndpointNatMappingsList()) + + request = compute.GetNatMappingInfoRoutersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VmEndpointNatMappingsList() + + client.get_nat_mapping_info(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_nat_mapping_info_rest_bad_request(transport: str = 'rest', request_type=compute.GetNatMappingInfoRoutersRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_nat_mapping_info(request) + + +def test_get_nat_mapping_info_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VmEndpointNatMappingsList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + router='router_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VmEndpointNatMappingsList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_nat_mapping_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}/getNatMappingInfo" % client.transport._host, args[1]) + + +def test_get_nat_mapping_info_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_nat_mapping_info( + compute.GetNatMappingInfoRoutersRequest(), + project='project_value', + region='region_value', + router='router_value', + ) + + +def test_get_nat_mapping_info_rest_pager(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.VmEndpointNatMappingsList( + result=[ + compute.VmEndpointNatMappings(), + compute.VmEndpointNatMappings(), + compute.VmEndpointNatMappings(), + ], + next_page_token='abc', + ), + compute.VmEndpointNatMappingsList( + result=[], + next_page_token='def', + ), + compute.VmEndpointNatMappingsList( + result=[ + compute.VmEndpointNatMappings(), + ], + next_page_token='ghi', + ), + compute.VmEndpointNatMappingsList( + result=[ + compute.VmEndpointNatMappings(), + compute.VmEndpointNatMappings(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.VmEndpointNatMappingsList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + + pager = client.get_nat_mapping_info(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.VmEndpointNatMappings) + for i in results) + + pages = list(client.get_nat_mapping_info(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.GetRouterStatusRouterRequest, + dict, +]) +def test_get_router_status_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call 
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.RouterStatusResponse(
+            kind='kind_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.RouterStatusResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.get_router_status(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, compute.RouterStatusResponse)
+    assert response.kind == 'kind_value'
+
+
+def test_get_router_status_rest_required_fields(request_type=compute.GetRouterStatusRouterRequest):
+    transport_class = transports.RoutersRestTransport
+
+    request_init = {}
+    request_init["project"] = ""
+    request_init["region"] = ""
+    request_init["router"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_router_status._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["region"] = 'region_value'
+    jsonified_request["router"] = 'router_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_router_status._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == 'region_value'
+    assert "router" in jsonified_request
+    assert jsonified_request["router"] == 'router_value'
+
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.RouterStatusResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.RouterStatusResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_router_status(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_router_status_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class (no parentheses)
+    # here, while other tests pass an instance — presumably a generator
+    # artifact; TODO confirm against gapic-generator-python output.
+    transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.get_router_status._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("project", "region", "router", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_router_status_rest_interceptors(null_interceptor):
+    transport = transports.RoutersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RoutersRestInterceptor(),
+        )
+    client = RoutersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.RoutersRestInterceptor, "post_get_router_status") as post, \
+        mock.patch.object(transports.RoutersRestInterceptor, "pre_get_router_status") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.GetRouterStatusRouterRequest.pb(compute.GetRouterStatusRouterRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.RouterStatusResponse.to_json(compute.RouterStatusResponse())
+
+        request = compute.GetRouterStatusRouterRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.RouterStatusResponse()
+
+        client.get_router_status(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_router_status_rest_bad_request(transport: str = 'rest', request_type=compute.GetRouterStatusRouterRequest):
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_router_status(request)
+
+
+def test_get_router_status_rest_flattened():
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.RouterStatusResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            region='region_value',
+            router='router_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.RouterStatusResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.get_router_status(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}/getRouterStatus" % client.transport._host, args[1])
+
+
+def test_get_router_status_rest_flattened_error(transport: str = 'rest'):
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_router_status(
+            compute.GetRouterStatusRouterRequest(),
+            project='project_value',
+            region='region_value',
+            router='router_value',
+        )
+
+
+def test_get_router_status_rest_error():
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.InsertRouterRequest,
+    dict,
+])
+def test_insert_rest(request_type):
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2'}
+    request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation(
+            client_operation_id='client_operation_id_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            end_time='end_time_value',
+            http_error_message='http_error_message_value',
+            http_error_status_code=2374,
+            id=205,
+            insert_time='insert_time_value',
+            kind='kind_value',
+            name='name_value',
+            operation_group_id='operation_group_id_value',
+            operation_type='operation_type_value',
+            progress=885,
+            region='region_value',
+            self_link='self_link_value',
+            start_time='start_time_value',
+            status=compute.Operation.Status.DONE,
+            status_message='status_message_value',
+            target_id=947,
+            target_link='target_link_value',
+            user='user_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.insert(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, extended_operation.ExtendedOperation)
+    assert response.client_operation_id == 'client_operation_id_value'
+    assert response.creation_timestamp == 'creation_timestamp_value'
+    assert response.description == 'description_value'
+    assert response.end_time == 'end_time_value'
+    assert response.http_error_message == 'http_error_message_value'
+    assert response.http_error_status_code == 2374
+    assert response.id == 205
+    assert response.insert_time == 'insert_time_value'
+    assert response.kind == 'kind_value'
+    assert response.name == 'name_value'
+    assert response.operation_group_id == 'operation_group_id_value'
+    assert response.operation_type == 'operation_type_value'
+    assert response.progress == 885
+    assert response.region == 'region_value'
+    assert response.self_link == 'self_link_value'
+    assert response.start_time == 'start_time_value'
+    assert response.status == compute.Operation.Status.DONE
+    assert response.status_message == 'status_message_value'
+    assert response.target_id == 947
+    assert response.target_link == 'target_link_value'
+    assert response.user == 'user_value'
+    assert response.zone == 'zone_value'
+
+
+def test_insert_rest_required_fields(request_type=compute.InsertRouterRequest):
+    transport_class = transports.RoutersRestTransport
+
+    request_init = {}
+    request_init["project"] = ""
+    request_init["region"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["region"] = 'region_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("request_id", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == 'region_value'
+
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.insert(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_insert_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials passed as class, not instance —
+    # generator artifact; TODO confirm.
+    transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.insert._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "routerResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_insert_rest_interceptors(null_interceptor):
+    transport = transports.RoutersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RoutersRestInterceptor(),
+        )
+    client = RoutersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.RoutersRestInterceptor, "post_insert") as post, \
+        mock.patch.object(transports.RoutersRestInterceptor, "pre_insert") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.InsertRouterRequest.pb(compute.InsertRouterRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.InsertRouterRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRouterRequest):
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2'}
+    request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.insert(request)
+
+
+def test_insert_rest_flattened():
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'region': 'sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            region='region_value',
+            router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.insert(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers" % client.transport._host, args[1])
+
+
+def test_insert_rest_flattened_error(transport: str = 'rest'):
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.insert(
+            compute.InsertRouterRequest(),
+            project='project_value',
+            region='region_value',
+            router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')),
+        )
+
+
+def test_insert_rest_error():
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.InsertRouterRequest,
+    dict,
+])
+def test_insert_unary_rest(request_type):
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2'}
+    request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation(
+            client_operation_id='client_operation_id_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            end_time='end_time_value',
+            http_error_message='http_error_message_value',
+            http_error_status_code=2374,
+            id=205,
+            insert_time='insert_time_value',
+            kind='kind_value',
+            name='name_value',
+            operation_group_id='operation_group_id_value',
+            operation_type='operation_type_value',
+            progress=885,
+            region='region_value',
+            self_link='self_link_value',
+            start_time='start_time_value',
+            status=compute.Operation.Status.DONE,
+            status_message='status_message_value',
+            target_id=947,
+            target_link='target_link_value',
+            user='user_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.insert_unary(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, compute.Operation)
+
+
+def test_insert_unary_rest_required_fields(request_type=compute.InsertRouterRequest):
+    transport_class = transports.RoutersRestTransport
+
+    request_init = {}
+    request_init["project"] = ""
+    request_init["region"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["region"] = 'region_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("request_id", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == 'region_value'
+
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.insert_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_insert_unary_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials passed as class, not instance —
+    # generator artifact; TODO confirm.
+    transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.insert._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "routerResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_insert_unary_rest_interceptors(null_interceptor):
+    transport = transports.RoutersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RoutersRestInterceptor(),
+        )
+    client = RoutersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.RoutersRestInterceptor, "post_insert") as post, \
+        mock.patch.object(transports.RoutersRestInterceptor, "pre_insert") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.InsertRouterRequest.pb(compute.InsertRouterRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.InsertRouterRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRouterRequest):
+    client = RoutersClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2'}
+    request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 
'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRouterRequest(), + project='project_value', + region='region_value', + router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')), + ) + + +def test_insert_unary_rest_error(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRoutersRequest, + dict, +]) +def test_list_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RouterList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RouterList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRoutersRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.RouterList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RouterList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutersRestInterceptor(), + ) + client = RoutersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.RoutersRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.RoutersRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRoutersRequest.pb(compute.ListRoutersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RouterList.to_json(compute.RouterList()) + + request = compute.ListRoutersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RouterList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRoutersRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RouterList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RouterList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRoutersRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RouterList( + items=[ + compute.Router(), + compute.Router(), + compute.Router(), + ], + next_page_token='abc', + ), + compute.RouterList( + items=[], + next_page_token='def', + ), + compute.RouterList( + items=[ + compute.Router(), + ], + next_page_token='ghi', + ), + compute.RouterList( + items=[ + compute.Router(), + compute.Router(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RouterList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Router) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRouterRequest, + dict, +]) +def test_patch_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 
'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 
'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchRouterRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["router"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + jsonified_request["router"] = 'router_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "router" in jsonified_request + assert jsonified_request["router"] == 'router_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "router", "routerResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RoutersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutersRestInterceptor(), + ) + client = RoutersClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutersRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.RoutersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRouterRequest.pb(compute.PatchRouterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRouterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRouterRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 
'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 
'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + router='router_value', + router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchRouterRequest(), + project='project_value', + region='region_value', + router='router_value', + router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')), + ) + + +def test_patch_rest_error(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRouterRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 
'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 
'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchRouterRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["router"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["router"] = 'router_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "router" in jsonified_request + assert jsonified_request["router"] == 'router_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.patch_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_patch_unary_rest_unset_required_fields():
+    transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.patch._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "router", "routerResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_patch_unary_rest_interceptors(null_interceptor):
+    transport = transports.RoutersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RoutersRestInterceptor(),
+        )
+    client = RoutersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RoutersRestInterceptor, "post_patch") as post,
\ + mock.patch.object(transports.RoutersRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRouterRequest.pb(compute.PatchRouterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRouterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRouterRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': 
True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 
'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + router='router_value', + router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRouterRequest(), + project='project_value', + region='region_value', + router='router_value', + router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')), + ) + + +def test_patch_unary_rest_error(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PreviewRouterRequest, + dict, +]) +def test_preview_rest(request_type): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 
'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 
'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RoutersPreviewResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RoutersPreviewResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.preview(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.RoutersPreviewResponse) + + +def test_preview_rest_required_fields(request_type=compute.PreviewRouterRequest): + transport_class = transports.RoutersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["router"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).preview._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["router"] = 'router_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).preview._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "router" in jsonified_request + assert jsonified_request["router"] == 'router_value' + + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RoutersPreviewResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.RoutersPreviewResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.preview(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_preview_rest_unset_required_fields():
+    transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.preview._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("project", "region", "router", "routerResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_preview_rest_interceptors(null_interceptor):
+    transport = transports.RoutersRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.RoutersRestInterceptor(),
+        )
+    client = RoutersClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.RoutersRestInterceptor, "post_preview") as post, \
+         mock.patch.object(transports.RoutersRestInterceptor, "pre_preview") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.PreviewRouterRequest.pb(compute.PreviewRouterRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        
req.return_value.request = PreparedRequest() + req.return_value._content = compute.RoutersPreviewResponse.to_json(compute.RoutersPreviewResponse()) + + request = compute.PreviewRouterRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RoutersPreviewResponse() + + client.preview(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_preview_rest_bad_request(transport: str = 'rest', request_type=compute.PreviewRouterRequest): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 
'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 
'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.preview(request) + + +def test_preview_rest_flattened(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RoutersPreviewResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + router='router_value', + router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RoutersPreviewResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.preview(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
        # The transcoded URL must match the preview http rule for this method.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}/preview" % client.transport._host, args[1])


def test_preview_rest_flattened_error(transport: str = 'rest'):
    # Mixing a request object with flattened fields must raise ValueError.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.preview(
            compute.PreviewRouterRequest(),
            project='project_value',
            region='region_value',
            router='router_value',
            router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')),
        )


def test_preview_rest_error():
    # Smoke test: constructing the REST client must not raise.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.UpdateRouterRequest,
    dict,
])
def test_update_rest(request_type):
    # Round-trip update() through a mocked REST session and verify every field
    # of the extended-operation response (asserted in the next block of lines).
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'}
    request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.update(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_update_rest_required_fields(request_type=compute.UpdateRouterRequest):
    # Exercise the required-field plumbing of the generated update() REST stub:
    # defaults are dropped, required path params are re-added, and the stub can
    # complete a call with transcode() mocked out.
    transport_class = transports.RoutersRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request_init["router"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'
    jsonified_request["router"] = 'router_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'
    assert "router" in jsonified_request
    assert jsonified_request["router"] == 'router_value'

    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "put",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.update(request)

            # No query params are expected beyond what transcode produced.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_update_rest_unset_required_fields():
    # NOTE(review): the credentials *class* (no call parentheses) is passed
    # here — this matches the generator's emitted code; confirm upstream.
    transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.update._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "router", "routerResource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_rest_interceptors(null_interceptor):
    # Verify that the pre_update/post_update interceptor hooks each fire
    # exactly once around a (fully mocked) update() call.
    transport = transports.RoutersRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RoutersRestInterceptor(),
    )
    client = RoutersClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RoutersRestInterceptor, "post_update") as post, \
         mock.patch.object(transports.RoutersRestInterceptor, "pre_update") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.UpdateRouterRequest.pb(compute.UpdateRouterRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.UpdateRouterRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRouterRequest):
    # An HTTP 400 from the mocked session must surface as BadRequest.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'}
    request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update(request)


def test_update_rest_flattened():
    # Flattened-call form of update() must hit the expected URL.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            router='router_value',
            router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.update(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        # The transcoded URL must match the update http rule for this method.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" % client.transport._host, args[1])


def test_update_rest_flattened_error(transport: str = 'rest'):
    # Mixing a request object with flattened fields must raise ValueError.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update(
            compute.UpdateRouterRequest(),
            project='project_value',
            region='region_value',
            router='router_value',
            router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')),
        )


def test_update_rest_error():
    # Smoke test: constructing the REST client must not raise.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.UpdateRouterRequest,
    dict,
])
def test_update_unary_rest(request_type):
    # Same round-trip as test_update_rest, but via the unary surface, which
    # returns the raw compute.Operation instead of an ExtendedOperation.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'}
    request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.update_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_update_unary_rest_required_fields(request_type=compute.UpdateRouterRequest):
    # Same required-field plumbing checks as test_update_rest_required_fields,
    # driven through the unary call surface.
    transport_class = transports.RoutersRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request_init["router"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'
    jsonified_request["router"] = 'router_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'
    assert "router" in jsonified_request
    assert jsonified_request["router"] == 'router_value'

    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "put",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.update_unary(request)

            # No query params are expected beyond what transcode produced.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_update_unary_rest_unset_required_fields():
    # NOTE(review): the credentials *class* (no call parentheses) is passed
    # here — this matches the generator's emitted code; confirm upstream.
    transport = transports.RoutersRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.update._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "router", "routerResource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_unary_rest_interceptors(null_interceptor):
    # Verify that the pre_update/post_update interceptor hooks each fire
    # exactly once around a (fully mocked) update_unary() call.
    transport = transports.RoutersRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RoutersRestInterceptor(),
    )
    client = RoutersClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.RoutersRestInterceptor, "post_update") as post, \
         mock.patch.object(transports.RoutersRestInterceptor, "pre_update") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.UpdateRouterRequest.pb(compute.UpdateRouterRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.UpdateRouterRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateRouterRequest):
    # An HTTP 400 from the mocked session must surface as BadRequest.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'}
    request_init["router_resource"] = {'bgp': {'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': [{'description': 'description_value', 'range_': 'range__value'}], 'asn': 322, 'keepalive_interval': 1914}, 'bgp_peers': [{'advertise_mode': 'advertise_mode_value', 'advertised_groups': ['advertised_groups_value1', 'advertised_groups_value2'], 'advertised_ip_ranges': {}, 'advertised_route_priority': 2714, 'bfd': {'min_receive_interval': 2122, 'min_transmit_interval': 2265, 'multiplier': 1095, 'session_initialization_mode': 'session_initialization_mode_value'}, 'custom_learned_ip_ranges': [{'range_': 'range__value'}], 'custom_learned_route_priority': 3140, 'enable': 'enable_value', 'enable_ipv6': True, 'interface_name': 'interface_name_value', 'ip_address': 'ip_address_value', 'ipv6_nexthop_address': 'ipv6_nexthop_address_value', 'management_type': 'management_type_value', 'md5_authentication_key_name': 'md5_authentication_key_name_value', 'name': 'name_value', 'peer_asn': 845, 'peer_ip_address': 'peer_ip_address_value', 'peer_ipv6_nexthop_address': 'peer_ipv6_nexthop_address_value', 'router_appliance_instance': 'router_appliance_instance_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'encrypted_interconnect_router': True, 'id': 205, 'interfaces': [{'ip_range': 'ip_range_value', 'linked_interconnect_attachment': 'linked_interconnect_attachment_value', 'linked_vpn_tunnel': 'linked_vpn_tunnel_value', 'management_type': 'management_type_value', 'name': 'name_value', 'private_ip_address': 'private_ip_address_value', 'redundant_interface': 'redundant_interface_value', 'subnetwork': 'subnetwork_value'}], 'kind': 'kind_value', 'md5_authentication_keys': [{'key': 'key_value', 'name': 'name_value'}], 'name': 'name_value', 'nats': [{'auto_network_tier': 'auto_network_tier_value', 'drain_nat_ips': ['drain_nat_ips_value1', 'drain_nat_ips_value2'], 'enable_dynamic_port_allocation': True, 'enable_endpoint_independent_mapping': True, 'endpoint_types': ['endpoint_types_value1', 'endpoint_types_value2'], 'icmp_idle_timeout_sec': 2214, 'log_config': {'enable': True, 'filter': 'filter_value'}, 'max_ports_per_vm': 1733, 'min_ports_per_vm': 1731, 'name': 'name_value', 'nat_ip_allocate_option': 'nat_ip_allocate_option_value', 'nat_ips': ['nat_ips_value1', 'nat_ips_value2'], 'rules': [{'action': {'source_nat_active_ips': ['source_nat_active_ips_value1', 'source_nat_active_ips_value2'], 'source_nat_drain_ips': ['source_nat_drain_ips_value1', 'source_nat_drain_ips_value2']}, 'description': 'description_value', 'match': 'match_value', 'rule_number': 1184}], 'source_subnetwork_ip_ranges_to_nat': 'source_subnetwork_ip_ranges_to_nat_value', 'subnetworks': [{'name': 'name_value', 'secondary_ip_range_names': ['secondary_ip_range_names_value1', 'secondary_ip_range_names_value2'], 'source_ip_ranges_to_nat': ['source_ip_ranges_to_nat_value1', 'source_ip_ranges_to_nat_value2']}], 'tcp_established_idle_timeout_sec': 3371, 'tcp_time_wait_timeout_sec': 2665, 'tcp_transitory_idle_timeout_sec': 3330, 'udp_idle_timeout_sec': 2118}], 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update_unary(request)


def test_update_unary_rest_flattened():
    # Flattened-call form of update_unary() must hit the expected URL.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'router': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            router='router_value',
            router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.update_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" % client.transport._host, args[1])


def test_update_unary_rest_flattened_error(transport: str = 'rest'):
    # Mixing a request object with flattened fields must raise ValueError.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_unary(
            compute.UpdateRouterRequest(),
            project='project_value',
            region='region_value',
            router='router_value',
            router_resource=compute.Router(bgp=compute.RouterBgp(advertise_mode='advertise_mode_value')),
        )


def test_update_unary_rest_error():
    # Smoke test: constructing the REST client must not raise.
    client = RoutersClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


def test_credentials_transport_error():
    # Mutually exclusive client constructor arguments must raise ValueError.
    # It is an error to provide credentials and a transport instance.
    transport = transports.RoutersRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RoutersClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.RoutersRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RoutersClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.RoutersRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RoutersClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = RoutersClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.RoutersRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = RoutersClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    # A client may be instantiated with a custom transport instance.
    transport = transports.RoutersRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = RoutersClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.RoutersRestTransport,
])
def test_transport_adc(transport_class):
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    # transport.kind must round-trip the requested transport name.
    transport = RoutersClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_routers_base_transport_error():
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.RoutersTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_routers_base_transport():
    # Instantiate the base transport.
    # (This function continues past the end of this chunk.)
    with mock.patch('google.cloud.compute_v1.services.routers.transports.RoutersTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.RoutersTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'get_nat_mapping_info', + 'get_router_status', + 'insert', + 'list', + 'patch', + 'preview', + 'update', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_routers_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.routers.transports.RoutersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RoutersTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_routers_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.routers.transports.RoutersTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RoutersTransport() + adc.assert_called_once() + + +def test_routers_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RoutersClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_routers_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RoutersRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_routers_host_no_port(transport_name): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_routers_host_with_port(transport_name): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_routers_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RoutersClient( 
+ credentials=creds1, + transport=transport_name, + ) + client2 = RoutersClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_nat_mapping_info._session + session2 = client2.transport.get_nat_mapping_info._session + assert session1 != session2 + session1 = client1.transport.get_router_status._session + session2 = client2.transport.get_router_status._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.preview._session + session2 = client2.transport.preview._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RoutersClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RoutersClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RoutersClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RoutersClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RoutersClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RoutersClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RoutersClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RoutersClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RoutersClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RoutersClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RoutersClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RoutersClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RoutersClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RoutersClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RoutersClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RoutersTransport, '_prep_wrapped_messages') as prep: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RoutersTransport, '_prep_wrapped_messages') as prep: + transport_class = RoutersClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RoutersClient, transports.RoutersRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_routes.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_routes.py new file mode 100644 index 000000000..a9c48c7d2 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_routes.py @@ -0,0 +1,2497 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.routes import RoutesClient +from google.cloud.compute_v1.services.routes import pagers +from google.cloud.compute_v1.services.routes import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RoutesClient._get_default_mtls_endpoint(None) is None + assert RoutesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert RoutesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert RoutesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert RoutesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert RoutesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RoutesClient, "rest"), +]) +def test_routes_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.RoutesRestTransport, "rest"), +]) +def test_routes_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (RoutesClient, "rest"), +]) +def test_routes_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_routes_client_get_transport_class(): + transport = RoutesClient.get_transport_class() + available_transports = [ + transports.RoutesRestTransport, + ] + assert transport in available_transports + + transport = RoutesClient.get_transport_class("rest") + assert transport == transports.RoutesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RoutesClient, transports.RoutesRestTransport, "rest"), +]) +@mock.patch.object(RoutesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RoutesClient)) +def test_routes_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is 
provided we won't create a new one. + with mock.patch.object(RoutesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RoutesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (RoutesClient, transports.RoutesRestTransport, "rest", "true"), + (RoutesClient, transports.RoutesRestTransport, "rest", "false"), +]) +@mock.patch.object(RoutesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RoutesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_routes_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + RoutesClient +]) +@mock.patch.object(RoutesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RoutesClient)) +def test_routes_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (RoutesClient, transports.RoutesRestTransport, "rest"), +]) +def test_routes_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (RoutesClient, transports.RoutesRestTransport, "rest", None), +]) +def test_routes_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRouteRequest, + dict, +]) +def test_delete_rest(request_type): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'route': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteRouteRequest): + transport_class = transports.RoutesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["route"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["route"] = 
'route_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "route" in jsonified_request + assert jsonified_request["route"] == 'route_value' + + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RoutesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "route", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutesRestInterceptor(), + ) + client = RoutesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RoutesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRouteRequest.pb(compute.DeleteRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRouteRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRouteRequest): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'route': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'route': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + route='route_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/routes/{route}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete( + compute.DeleteRouteRequest(), + project='project_value', + route='route_value', + ) + + +def test_delete_rest_error(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteRouteRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'route': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteRouteRequest): + transport_class = transports.RoutesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["route"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["route"] = 'route_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "route" in jsonified_request + assert jsonified_request["route"] == 'route_value' + + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RoutesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "route", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutesRestInterceptor(), + ) + client = RoutesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.RoutesRestInterceptor, 
"pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteRouteRequest.pb(compute.DeleteRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteRouteRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteRouteRequest): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'route': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'route': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + route='route_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/routes/{route}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRouteRequest(), + project='project_value', + route='route_value', + ) + + +def test_delete_unary_rest_error(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRouteRequest, + dict, +]) +def test_get_rest(request_type): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'route': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Route( + creation_timestamp='creation_timestamp_value', + description='description_value', + dest_range='dest_range_value', + id=205, + kind='kind_value', + name='name_value', + network='network_value', + next_hop_gateway='next_hop_gateway_value', + next_hop_hub='next_hop_hub_value', + next_hop_ilb='next_hop_ilb_value', + next_hop_instance='next_hop_instance_value', + next_hop_ip='next_hop_ip_value', + next_hop_network='next_hop_network_value', + next_hop_peering='next_hop_peering_value', + next_hop_vpn_tunnel='next_hop_vpn_tunnel_value', + priority=898, + route_status='route_status_value', + route_type='route_type_value', + self_link='self_link_value', + tags=['tags_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Route) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.dest_range == 'dest_range_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.next_hop_gateway == 'next_hop_gateway_value' + assert response.next_hop_hub == 'next_hop_hub_value' + assert response.next_hop_ilb == 'next_hop_ilb_value' + assert response.next_hop_instance == 'next_hop_instance_value' + assert response.next_hop_ip == 'next_hop_ip_value' + assert response.next_hop_network == 'next_hop_network_value' + assert response.next_hop_peering == 'next_hop_peering_value' + assert response.next_hop_vpn_tunnel == 'next_hop_vpn_tunnel_value' + assert response.priority == 898 + assert response.route_status == 'route_status_value' + assert response.route_type == 'route_type_value' + assert response.self_link == 'self_link_value' + assert response.tags == ['tags_value'] + + +def test_get_rest_required_fields(request_type=compute.GetRouteRequest): + transport_class = transports.RoutesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["route"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["route"] = 'route_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "route" in jsonified_request + assert jsonified_request["route"] == 'route_value' + + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Route() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RoutesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "route", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutesRestInterceptor(), + ) + client = RoutesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.RoutesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRouteRequest.pb(compute.GetRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Route.to_json(compute.Route()) + + request = 
compute.GetRouteRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Route() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetRouteRequest): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'route': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Route() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'route': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + route='route_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/routes/{route}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRouteRequest(), + project='project_value', + route='route_value', + ) + + +def test_get_rest_error(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRouteRequest, + dict, +]) +def test_insert_rest(request_type): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["route_resource"] = {'as_paths': [{'as_lists': [867, 868], 'path_segment_type': 'path_segment_type_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dest_range': 'dest_range_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'next_hop_gateway': 'next_hop_gateway_value', 'next_hop_hub': 'next_hop_hub_value', 'next_hop_ilb': 'next_hop_ilb_value', 'next_hop_instance': 'next_hop_instance_value', 'next_hop_ip': 'next_hop_ip_value', 'next_hop_network': 'next_hop_network_value', 'next_hop_peering': 'next_hop_peering_value', 'next_hop_vpn_tunnel': 'next_hop_vpn_tunnel_value', 'priority': 898, 'route_status': 'route_status_value', 'route_type': 'route_type_value', 'self_link': 'self_link_value', 'tags': ['tags_value1', 'tags_value2'], 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertRouteRequest): + transport_class = transports.RoutesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RoutesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "routeResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutesRestInterceptor(), + ) + client = RoutesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RoutesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRouteRequest.pb(compute.InsertRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRouteRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRouteRequest): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["route_resource"] = {'as_paths': [{'as_lists': [867, 868], 'path_segment_type': 'path_segment_type_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dest_range': 'dest_range_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'next_hop_gateway': 'next_hop_gateway_value', 'next_hop_hub': 'next_hop_hub_value', 'next_hop_ilb': 'next_hop_ilb_value', 'next_hop_instance': 'next_hop_instance_value', 'next_hop_ip': 'next_hop_ip_value', 'next_hop_network': 'next_hop_network_value', 'next_hop_peering': 'next_hop_peering_value', 'next_hop_vpn_tunnel': 'next_hop_vpn_tunnel_value', 'priority': 898, 'route_status': 'route_status_value', 'route_type': 'route_type_value', 'self_link': 'self_link_value', 'tags': ['tags_value1', 'tags_value2'], 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + route_resource=compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/routes" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRouteRequest(), + project='project_value', + route_resource=compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])]), + ) + + +def test_insert_rest_error(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertRouteRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["route_resource"] = {'as_paths': [{'as_lists': [867, 868], 'path_segment_type': 'path_segment_type_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dest_range': 'dest_range_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'next_hop_gateway': 'next_hop_gateway_value', 'next_hop_hub': 'next_hop_hub_value', 'next_hop_ilb': 'next_hop_ilb_value', 'next_hop_instance': 'next_hop_instance_value', 'next_hop_ip': 'next_hop_ip_value', 'next_hop_network': 'next_hop_network_value', 'next_hop_peering': 'next_hop_peering_value', 'next_hop_vpn_tunnel': 'next_hop_vpn_tunnel_value', 'priority': 898, 'route_status': 'route_status_value', 'route_type': 'route_type_value', 'self_link': 'self_link_value', 'tags': ['tags_value1', 'tags_value2'], 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertRouteRequest): + transport_class = transports.RoutesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RoutesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "routeResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutesRestInterceptor(), + ) + client = RoutesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.RoutesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertRouteRequest.pb(compute.InsertRouteRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertRouteRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertRouteRequest): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["route_resource"] = {'as_paths': [{'as_lists': [867, 868], 'path_segment_type': 'path_segment_type_value'}], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'dest_range': 'dest_range_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'network': 'network_value', 'next_hop_gateway': 'next_hop_gateway_value', 'next_hop_hub': 'next_hop_hub_value', 'next_hop_ilb': 'next_hop_ilb_value', 'next_hop_instance': 'next_hop_instance_value', 'next_hop_ip': 'next_hop_ip_value', 'next_hop_network': 'next_hop_network_value', 'next_hop_peering': 'next_hop_peering_value', 'next_hop_vpn_tunnel': 'next_hop_vpn_tunnel_value', 'priority': 898, 'route_status': 'route_status_value', 'route_type': 'route_type_value', 'self_link': 'self_link_value', 'tags': ['tags_value1', 'tags_value2'], 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + route_resource=compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/routes" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRouteRequest(), + project='project_value', + route_resource=compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])]), + ) + + +def test_insert_unary_rest_error(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListRoutesRequest, + dict, +]) +def test_list_rest(request_type): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.RouteList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RouteList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListRoutesRequest): + transport_class = transports.RoutesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RouteList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.RouteList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RoutesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.RoutesRestInterceptor(), + ) + client = RoutesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.RoutesRestInterceptor, "post_list") as post, \ + 
mock.patch.object(transports.RoutesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListRoutesRequest.pb(compute.ListRoutesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.RouteList.to_json(compute.RouteList()) + + request = compute.ListRoutesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.RouteList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListRoutesRequest): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.RouteList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.RouteList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/routes" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRoutesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RouteList( + items=[ + compute.Route(), + compute.Route(), + compute.Route(), + ], + next_page_token='abc', + ), + compute.RouteList( + items=[], + next_page_token='def', + ), + compute.RouteList( + items=[ + compute.Route(), + ], + next_page_token='ghi', + ), + compute.RouteList( + items=[ + compute.Route(), + compute.Route(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.RouteList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Route) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RoutesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RoutesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RoutesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RoutesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RoutesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RoutesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.RoutesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = RoutesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_routes_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RoutesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_routes_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.routes.transports.RoutesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.RoutesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_routes_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.routes.transports.RoutesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RoutesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_routes_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.routes.transports.RoutesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RoutesTransport() + adc.assert_called_once() + + +def test_routes_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RoutesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_routes_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.RoutesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_routes_host_no_port(transport_name): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_routes_host_with_port(transport_name): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_routes_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RoutesClient( + 
credentials=creds1, + transport=transport_name, + ) + client2 = RoutesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = RoutesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RoutesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RoutesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = RoutesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RoutesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RoutesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = RoutesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RoutesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RoutesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = RoutesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RoutesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RoutesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = RoutesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RoutesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RoutesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.RoutesTransport, '_prep_wrapped_messages') as prep: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.RoutesTransport, '_prep_wrapped_messages') as prep: + transport_class = RoutesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (RoutesClient, transports.RoutesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_security_policies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_security_policies.py new file mode 100644 index 000000000..21651a89f --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_security_policies.py @@ -0,0 +1,6042 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.security_policies import SecurityPoliciesClient +from google.cloud.compute_v1.services.security_policies import pagers +from google.cloud.compute_v1.services.security_policies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SecurityPoliciesClient._get_default_mtls_endpoint(None) is None + assert SecurityPoliciesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SecurityPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SecurityPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SecurityPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SecurityPoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SecurityPoliciesClient, "rest"), +]) +def test_security_policies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SecurityPoliciesRestTransport, "rest"), +]) +def test_security_policies_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SecurityPoliciesClient, "rest"), +]) +def test_security_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_security_policies_client_get_transport_class(): + transport = SecurityPoliciesClient.get_transport_class() + available_transports = [ + transports.SecurityPoliciesRestTransport, + ] + assert transport in available_transports + + transport = SecurityPoliciesClient.get_transport_class("rest") + assert transport == transports.SecurityPoliciesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SecurityPoliciesClient, transports.SecurityPoliciesRestTransport, "rest"), +]) 
+@mock.patch.object(SecurityPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SecurityPoliciesClient)) +def test_security_policies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SecurityPoliciesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SecurityPoliciesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SecurityPoliciesClient, transports.SecurityPoliciesRestTransport, "rest", "true"), + (SecurityPoliciesClient, transports.SecurityPoliciesRestTransport, "rest", "false"), +]) +@mock.patch.object(SecurityPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SecurityPoliciesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_security_policies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + SecurityPoliciesClient +]) +@mock.patch.object(SecurityPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SecurityPoliciesClient)) +def test_security_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SecurityPoliciesClient, transports.SecurityPoliciesRestTransport, "rest"), +]) +def test_security_policies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SecurityPoliciesClient, transports.SecurityPoliciesRestTransport, "rest", None), +]) +def test_security_policies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddRuleSecurityPolicyRequest, + dict, +]) +def test_add_rule_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': 
[{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_rule_rest_required_fields(request_type=compute.AddRuleSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_rule_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.add_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", )) & set(("project", "securityPolicy", "securityPolicyRuleResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_rule_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_add_rule") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_add_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddRuleSecurityPolicyRequest.pb(compute.AddRuleSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddRuleSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_rule_rest_bad_request(transport: str = 'rest', request_type=compute.AddRuleSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 
'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_rule(request) + + +def test_add_rule_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + security_policy_rule_resource=compute.SecurityPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/addRule" % client.transport._host, args[1]) + + +def test_add_rule_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_rule( + compute.AddRuleSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + security_policy_rule_resource=compute.SecurityPolicyRule(action='action_value'), + ) + + +def test_add_rule_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddRuleSecurityPolicyRequest, + dict, +]) +def test_add_rule_unary_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 
'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_rule_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_rule_unary_rest_required_fields(request_type=compute.AddRuleSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_rule_unary_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.add_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", )) & set(("project", "securityPolicy", "securityPolicyRuleResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_rule_unary_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_add_rule") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_add_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddRuleSecurityPolicyRequest.pb(compute.AddRuleSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddRuleSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddRuleSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 
'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_rule_unary(request) + + +def test_add_rule_unary_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + security_policy_rule_resource=compute.SecurityPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/addRule" % client.transport._host, args[1]) + + +def test_add_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_rule_unary( + compute.AddRuleSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + security_policy_rule_resource=compute.SecurityPolicyRule(action='action_value'), + ) + + +def test_add_rule_unary_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListSecurityPoliciesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPoliciesAggregatedList( + etag='etag_value', + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListPager) + assert response.etag == 'etag_value' + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListSecurityPoliciesRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.SecurityPoliciesAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListSecurityPoliciesRequest.pb(compute.AggregatedListSecurityPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPoliciesAggregatedList.to_json(compute.SecurityPoliciesAggregatedList()) + + request = compute.AggregatedListSecurityPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPoliciesAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListSecurityPoliciesRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPoliciesAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/securityPolicies" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListSecurityPoliciesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SecurityPoliciesAggregatedList( + items={ + 'a':compute.SecurityPoliciesScopedList(), + 'b':compute.SecurityPoliciesScopedList(), + 'c':compute.SecurityPoliciesScopedList(), + }, + next_page_token='abc', + ), + compute.SecurityPoliciesAggregatedList( + items={}, + next_page_token='def', + ), + compute.SecurityPoliciesAggregatedList( + items={ + 'g':compute.SecurityPoliciesScopedList(), + }, + next_page_token='ghi', + ), + compute.SecurityPoliciesAggregatedList( + items={ + 'h':compute.SecurityPoliciesScopedList(), + 'i':compute.SecurityPoliciesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SecurityPoliciesAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.SecurityPoliciesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for 
result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.SecurityPoliciesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.SecurityPoliciesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSecurityPolicyRequest, + dict, +]) +def test_delete_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "securityPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSecurityPolicyRequest.pb(compute.DeleteSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + ) + + +def test_delete_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSecurityPolicyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "securityPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSecurityPolicyRequest.pb(compute.DeleteSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + ) + + +def test_delete_unary_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetSecurityPolicyRequest, + dict, +]) +def test_get_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicy( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + name='name_value', + region='region_value', + self_link='self_link_value', + type_='type__value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SecurityPolicy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.type_ == 'type__value' + + +def test_get_rest_required_fields(request_type=compute.GetSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "securityPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, 
\ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetSecurityPolicyRequest.pb(compute.GetSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPolicy.to_json(compute.SecurityPolicy()) + + request = compute.GetSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPolicy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + ) + + +def test_get_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetRuleSecurityPolicyRequest, + dict, +]) +def test_get_rule_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicyRule( + action='action_value', + description='description_value', + kind='kind_value', + preview=True, + priority=898, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SecurityPolicyRule) + assert response.action == 'action_value' + assert response.description == 'description_value' + assert response.kind == 'kind_value' + assert response.preview is True + assert response.priority == 898 + + +def test_get_rule_rest_required_fields(request_type=compute.GetRuleSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.SecurityPolicyRule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rule_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", )) & set(("project", "securityPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rule_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_get_rule") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_get_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetRuleSecurityPolicyRequest.pb(compute.GetRuleSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPolicyRule.to_json(compute.SecurityPolicyRule()) + + request = compute.GetRuleSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPolicyRule() + + client.get_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rule_rest_bad_request(transport: str = 'rest', request_type=compute.GetRuleSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rule(request) + + +def test_get_rule_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicyRule() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPolicyRule.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/getRule" % client.transport._host, args[1]) + + +def test_get_rule_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_rule( + compute.GetRuleSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + ) + + +def test_get_rule_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertSecurityPolicyRequest, + dict, +]) +def test_insert_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 
'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("project", "securityPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertSecurityPolicyRequest.pb(compute.InsertSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 
'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertSecurityPolicyRequest(), + project='project_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + + +def test_insert_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertSecurityPolicyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 
'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("project", "securityPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertSecurityPolicyRequest.pb(compute.InsertSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 
'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    # (continued from previous chunk) mixing a request object with flattened
    # fields must raise.
    with pytest.raises(ValueError):
        client.insert_unary(
            compute.InsertSecurityPolicyRequest(),
            project='project_value',
            security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))),
        )


def test_insert_unary_rest_error():
    # Smoke test: constructing a REST-transport client must not raise.
    client = SecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.ListSecurityPoliciesRequest,
    dict,
])
def test_list_rest(request_type):
    """list() over REST: a mocked 200 response is decoded into a pager.

    Parametrized over the proto request type and a plain dict to cover both
    accepted request forms.
    """
    client = SecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SecurityPolicyList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.SecurityPolicyList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + + +def test_list_rest_required_fields(request_type=compute.ListSecurityPoliciesRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPolicyList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SecurityPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_list") 
as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListSecurityPoliciesRequest.pb(compute.ListSecurityPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPolicyList.to_json(compute.SecurityPolicyList()) + + request = compute.ListSecurityPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPolicyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListSecurityPoliciesRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
    # (continued from previous chunk) mock the session and return an empty
    # SecurityPolicyList so only the request routing is under test.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SecurityPolicyList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.SecurityPolicyList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = SecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListSecurityPoliciesRequest(),
            project='project_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    """Pager walks next_page_token across a series of mocked page responses."""
    client = SecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + compute.SecurityPolicy(), + compute.SecurityPolicy(), + ], + next_page_token='abc', + ), + compute.SecurityPolicyList( + items=[], + next_page_token='def', + ), + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + ], + next_page_token='ghi', + ), + compute.SecurityPolicyList( + items=[ + compute.SecurityPolicy(), + compute.SecurityPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SecurityPolicyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.SecurityPolicy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, + dict, +]) +def test_list_preconfigured_expression_sets_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_preconfigured_expression_sets(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse) + + +def test_list_preconfigured_expression_sets_rest_required_fields(request_type=compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_preconfigured_expression_sets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_preconfigured_expression_sets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_preconfigured_expression_sets(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_preconfigured_expression_sets_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_preconfigured_expression_sets._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_preconfigured_expression_sets_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_list_preconfigured_expression_sets") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_list_preconfigured_expression_sets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.pb(compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.to_json(compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse()) + + request = compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse() + + client.list_preconfigured_expression_sets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_preconfigured_expression_sets_rest_bad_request(transport: str = 'rest', request_type=compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
    # (continued from previous chunk) a mocked HTTP 400 must surface to the
    # caller as core_exceptions.BadRequest.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_preconfigured_expression_sets(request)


def test_list_preconfigured_expression_sets_rest_flattened():
    """Flattened kwargs are merged into the request and hit the expected URL."""
    client = SecurityPoliciesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list_preconfigured_expression_sets(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/listPreconfiguredExpressionSets" % client.transport._host, args[1]) + + +def test_list_preconfigured_expression_sets_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_preconfigured_expression_sets( + compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest(), + project='project_value', + ) + + +def test_list_preconfigured_expression_sets_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchSecurityPolicyRequest, + dict, +]) +def test_patch_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 
'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "securityPolicy", "securityPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchSecurityPolicyRequest.pb(compute.PatchSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 
'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + + +def test_patch_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchSecurityPolicyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 
'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "securityPolicy", "securityPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchSecurityPolicyRequest.pb(compute.PatchSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_resource"] = {'adaptive_protection_config': {'layer7_ddos_defense_config': {'enable': True, 'rule_visibility': 'rule_visibility_value'}}, 'advanced_options_config': {'json_custom_config': {'content_types': ['content_types_value1', 'content_types_value2']}, 'json_parsing': 'json_parsing_value', 'log_level': 'log_level_value'}, 'creation_timestamp': 'creation_timestamp_value', 'ddos_protection_config': {'ddos_protection': 'ddos_protection_value'}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 
'recaptcha_options_config': {'redirect_site_key': 'redirect_site_key_value'}, 'region': 'region_value', 'rules': [{'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}}], 'self_link': 'self_link_value', 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + security_policy_resource=compute.SecurityPolicy(adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig(layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(enable=True))), + ) + + +def test_patch_unary_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRuleSecurityPolicyRequest, + dict, +]) +def test_patch_rule_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': 
[{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rule_rest_required_fields(request_type=compute.PatchRuleSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rule_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "validateOnly", )) & set(("project", "securityPolicy", "securityPolicyRuleResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rule_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_patch_rule") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_patch_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRuleSecurityPolicyRequest.pb(compute.PatchRuleSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRuleSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rule_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRuleSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 
'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule(request) + + +def test_patch_rule_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + security_policy_rule_resource=compute.SecurityPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/patchRule" % client.transport._host, args[1]) + + +def test_patch_rule_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_rule( + compute.PatchRuleSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + security_policy_rule_resource=compute.SecurityPolicyRule(action='action_value'), + ) + + +def test_patch_rule_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchRuleSecurityPolicyRequest, + dict, +]) +def test_patch_rule_unary_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 
'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_rule_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_rule_unary_rest_required_fields(request_type=compute.PatchRuleSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rule_unary_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", "validateOnly", )) & set(("project", "securityPolicy", "securityPolicyRuleResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rule_unary_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, 
\ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_patch_rule") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_patch_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchRuleSecurityPolicyRequest.pb(compute.PatchRuleSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchRuleSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchRuleSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request_init["security_policy_rule_resource"] = {'action': 'action_value', 'description': 'description_value', 'header_action': {'request_headers_to_adds': [{'header_name': 'header_name_value', 'header_value': 'header_value_value'}]}, 'kind': 'kind_value', 'match': {'config': {'src_ip_ranges': ['src_ip_ranges_value1', 'src_ip_ranges_value2']}, 'expr': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'versioned_expr': 'versioned_expr_value'}, 'preconfigured_waf_config': {'exclusions': [{'request_cookies_to_exclude': [{'op': 'op_value', 'val': 
'val_value'}], 'request_headers_to_exclude': {}, 'request_query_params_to_exclude': {}, 'request_uris_to_exclude': {}, 'target_rule_ids': ['target_rule_ids_value1', 'target_rule_ids_value2'], 'target_rule_set': 'target_rule_set_value'}]}, 'preview': True, 'priority': 898, 'rate_limit_options': {'ban_duration_sec': 1680, 'ban_threshold': {'count': 553, 'interval_sec': 1279}, 'conform_action': 'conform_action_value', 'enforce_on_key': 'enforce_on_key_value', 'enforce_on_key_configs': [{'enforce_on_key_name': 'enforce_on_key_name_value', 'enforce_on_key_type': 'enforce_on_key_type_value'}], 'enforce_on_key_name': 'enforce_on_key_name_value', 'exceed_action': 'exceed_action_value', 'exceed_redirect_options': {'target': 'target_value', 'type_': 'type__value'}, 'rate_limit_threshold': {}}, 'redirect_options': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule_unary(request) + + +def test_patch_rule_unary_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + security_policy_rule_resource=compute.SecurityPolicyRule(action='action_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/patchRule" % client.transport._host, args[1]) + + +def test_patch_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_rule_unary( + compute.PatchRuleSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + security_policy_rule_resource=compute.SecurityPolicyRule(action='action_value'), + ) + + +def test_patch_rule_unary_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveRuleSecurityPolicyRequest, + dict, +]) +def test_remove_rule_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_rule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_rule_rest_required_fields(request_type=compute.RemoveRuleSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_rule(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_rule_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", )) & set(("project", "securityPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_rule_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_remove_rule") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_remove_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveRuleSecurityPolicyRequest.pb(compute.RemoveRuleSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveRuleSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_rule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_rule_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveRuleSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule(request) + + +def test_remove_rule_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_rule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/removeRule" % client.transport._host, args[1]) + + +def test_remove_rule_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_rule( + compute.RemoveRuleSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + ) + + +def test_remove_rule_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveRuleSecurityPolicyRequest, + dict, +]) +def test_remove_rule_unary_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') 
+ req.return_value = response_value + response = client.remove_rule_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_remove_rule_unary_rest_required_fields(request_type=compute.RemoveRuleSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["security_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["securityPolicy"] = 'security_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_rule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("priority", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "securityPolicy" in jsonified_request + assert jsonified_request["securityPolicy"] == 'security_policy_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_rule_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_rule_unary_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_rule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("priority", )) & set(("project", "securityPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_rule_unary_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_remove_rule") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_remove_rule") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveRuleSecurityPolicyRequest.pb(compute.RemoveRuleSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveRuleSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_rule_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_rule_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveRuleSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'security_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule_unary(request) + + +def test_remove_rule_unary_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'security_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + security_policy='security_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_rule_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/removeRule" % client.transport._host, args[1]) + + +def test_remove_rule_unary_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_rule_unary( + compute.RemoveRuleSecurityPolicyRequest(), + project='project_value', + security_policy='security_policy_value', + ) + + +def test_remove_rule_unary_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsSecurityPolicyRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsSecurityPolicyRequest.pb(compute.SetLabelsSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + 
} + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsSecurityPolicyRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsSecurityPolicyRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsSecurityPolicyRequest): + transport_class = transports.SecurityPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.SecurityPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SecurityPoliciesRestInterceptor(), + ) + client = SecurityPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.SecurityPoliciesRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetLabelsSecurityPolicyRequest.pb(compute.SetLabelsSecurityPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsSecurityPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsSecurityPolicyRequest): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/securityPolicies/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsSecurityPolicyRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SecurityPoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SecurityPoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SecurityPoliciesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SecurityPoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SecurityPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SecurityPoliciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.SecurityPoliciesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = SecurityPoliciesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_security_policies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SecurityPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_security_policies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.security_policies.transports.SecurityPoliciesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.SecurityPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'add_rule', + 'aggregated_list', + 'delete', + 'get', + 'get_rule', + 'insert', + 'list', + 'list_preconfigured_expression_sets', + 'patch', + 'patch_rule', + 'remove_rule', + 'set_labels', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_security_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.security_policies.transports.SecurityPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SecurityPoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_security_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.security_policies.transports.SecurityPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SecurityPoliciesTransport() + adc.assert_called_once() + + +def test_security_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SecurityPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_security_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.SecurityPoliciesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_security_policies_host_no_port(transport_name): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_security_policies_host_with_port(transport_name): + client = SecurityPoliciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_security_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SecurityPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SecurityPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.add_rule._session + session2 = client2.transport.add_rule._session + assert session1 != session2 + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_rule._session + session2 = client2.transport.get_rule._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_preconfigured_expression_sets._session + session2 = client2.transport.list_preconfigured_expression_sets._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.patch_rule._session + 
session2 = client2.transport.patch_rule._session + assert session1 != session2 + session1 = client1.transport.remove_rule._session + session2 = client2.transport.remove_rule._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SecurityPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SecurityPoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SecurityPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = SecurityPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SecurityPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SecurityPoliciesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SecurityPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SecurityPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SecurityPoliciesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = SecurityPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SecurityPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SecurityPoliciesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SecurityPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SecurityPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SecurityPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SecurityPoliciesTransport, '_prep_wrapped_messages') as prep: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SecurityPoliciesTransport, '_prep_wrapped_messages') as prep: + transport_class = SecurityPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SecurityPoliciesClient, transports.SecurityPoliciesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_service_attachments.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_service_attachments.py new file mode 100644 index 000000000..ab27ba382 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_service_attachments.py @@ -0,0 +1,4168 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.service_attachments import ServiceAttachmentsClient +from google.cloud.compute_v1.services.service_attachments import pagers +from google.cloud.compute_v1.services.service_attachments import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ServiceAttachmentsClient._get_default_mtls_endpoint(None) is None + assert ServiceAttachmentsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ServiceAttachmentsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ServiceAttachmentsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ServiceAttachmentsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ServiceAttachmentsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceAttachmentsClient, "rest"), +]) +def test_service_attachments_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ServiceAttachmentsRestTransport, "rest"), +]) +def test_service_attachments_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (ServiceAttachmentsClient, "rest"), +]) +def test_service_attachments_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_service_attachments_client_get_transport_class(): + transport = ServiceAttachmentsClient.get_transport_class() + available_transports = [ + transports.ServiceAttachmentsRestTransport, + ] + assert transport in available_transports + + transport = ServiceAttachmentsClient.get_transport_class("rest") + assert transport == transports.ServiceAttachmentsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceAttachmentsClient, transports.ServiceAttachmentsRestTransport, 
"rest"), +]) +@mock.patch.object(ServiceAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceAttachmentsClient)) +def test_service_attachments_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ServiceAttachmentsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ServiceAttachmentsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ServiceAttachmentsClient, transports.ServiceAttachmentsRestTransport, "rest", "true"), + (ServiceAttachmentsClient, transports.ServiceAttachmentsRestTransport, "rest", "false"), +]) +@mock.patch.object(ServiceAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceAttachmentsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_service_attachments_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the 
endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ServiceAttachmentsClient +]) +@mock.patch.object(ServiceAttachmentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceAttachmentsClient)) +def test_service_attachments_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ServiceAttachmentsClient, transports.ServiceAttachmentsRestTransport, "rest"), +]) +def test_service_attachments_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ServiceAttachmentsClient, transports.ServiceAttachmentsRestTransport, "rest", None), +]) +def test_service_attachments_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListServiceAttachmentsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ServiceAttachmentAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListServiceAttachmentsRequest): + transport_class = transports.ServiceAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ServiceAttachmentAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(), + ) + client = ServiceAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListServiceAttachmentsRequest.pb(compute.AggregatedListServiceAttachmentsRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.ServiceAttachmentAggregatedList.to_json(compute.ServiceAttachmentAggregatedList()) + + request = compute.AggregatedListServiceAttachmentsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ServiceAttachmentAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListServiceAttachmentsRequest): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ServiceAttachmentAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ServiceAttachmentAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/serviceAttachments" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListServiceAttachmentsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ServiceAttachmentAggregatedList( + items={ + 'a':compute.ServiceAttachmentsScopedList(), + 'b':compute.ServiceAttachmentsScopedList(), + 'c':compute.ServiceAttachmentsScopedList(), + }, + next_page_token='abc', + ), + compute.ServiceAttachmentAggregatedList( + items={}, + next_page_token='def', + ), + compute.ServiceAttachmentAggregatedList( + items={ + 'g':compute.ServiceAttachmentsScopedList(), + }, + next_page_token='ghi', + ), + compute.ServiceAttachmentAggregatedList( + items={ + 'h':compute.ServiceAttachmentsScopedList(), + 'i':compute.ServiceAttachmentsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ServiceAttachmentAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.ServiceAttachmentsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.ServiceAttachmentsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.ServiceAttachmentsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + 
compute.DeleteServiceAttachmentRequest, + dict, +]) +def test_delete_rest(request_type): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteServiceAttachmentRequest): + transport_class = transports.ServiceAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["service_attachment"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["serviceAttachment"] = 'service_attachment_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "serviceAttachment" in jsonified_request + assert jsonified_request["serviceAttachment"] == 'service_attachment_value' + + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "serviceAttachment", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(), + ) + client = ServiceAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteServiceAttachmentRequest.pb(compute.DeleteServiceAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteServiceAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteServiceAttachmentRequest): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + service_attachment='service_attachment_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteServiceAttachmentRequest(), + project='project_value', + region='region_value', + service_attachment='service_attachment_value', + ) + + +def test_delete_rest_error(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteServiceAttachmentRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteServiceAttachmentRequest): + transport_class = transports.ServiceAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["service_attachment"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["serviceAttachment"] = 'service_attachment_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "serviceAttachment" in jsonified_request + assert jsonified_request["serviceAttachment"] == 'service_attachment_value' + + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "serviceAttachment", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(), + ) + client = ServiceAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteServiceAttachmentRequest.pb(compute.DeleteServiceAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteServiceAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteServiceAttachmentRequest): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + service_attachment='service_attachment_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteServiceAttachmentRequest(), + project='project_value', + region='region_value', + service_attachment='service_attachment_value', + ) + + +def test_delete_unary_rest_error(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetServiceAttachmentRequest, + dict, +]) +def test_get_rest(request_type): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ServiceAttachment( + connection_preference='connection_preference_value', + consumer_reject_lists=['consumer_reject_lists_value'], + creation_timestamp='creation_timestamp_value', + description='description_value', + domain_names=['domain_names_value'], + enable_proxy_protocol=True, + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + nat_subnets=['nat_subnets_value'], + producer_forwarding_rule='producer_forwarding_rule_value', + reconcile_connections=True, + region='region_value', + self_link='self_link_value', + target_service='target_service_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ServiceAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.ServiceAttachment) + assert response.connection_preference == 'connection_preference_value' + assert response.consumer_reject_lists == ['consumer_reject_lists_value'] + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.domain_names == ['domain_names_value'] + assert response.enable_proxy_protocol is True + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.nat_subnets == ['nat_subnets_value'] + assert response.producer_forwarding_rule == 'producer_forwarding_rule_value' + assert response.reconcile_connections is True + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.target_service == 'target_service_value' + + +def test_get_rest_required_fields(request_type=compute.GetServiceAttachmentRequest): + transport_class = transports.ServiceAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["service_attachment"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["serviceAttachment"] = 'service_attachment_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "serviceAttachment" in jsonified_request + assert jsonified_request["serviceAttachment"] == 'service_attachment_value' + + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ServiceAttachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ServiceAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "serviceAttachment", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(), + ) + client = ServiceAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetServiceAttachmentRequest.pb(compute.GetServiceAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.ServiceAttachment.to_json(compute.ServiceAttachment()) + + request = compute.GetServiceAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ServiceAttachment() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetServiceAttachmentRequest): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ServiceAttachment() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + service_attachment='service_attachment_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ServiceAttachment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetServiceAttachmentRequest(), + project='project_value', + region='region_value', + service_attachment='service_attachment_value', + ) + + +def test_get_rest_error(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicyServiceAttachmentRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicyServiceAttachmentRequest): + transport_class = transports.ServiceAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
            # NOTE(review): continuation of test_get_iam_policy_rest_required_fields,
            # whose definition starts above this chunk.
            # Force a fully transcoded request so required fields surface as query params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Policy.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get_iam_policy(request)

            # No default-valued required fields for this method, so no query params expected.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_iam_policy_rest_unset_required_fields():
    """get_iam_policy advertises the expected optional vs. required field sets."""
    # NOTE(review): AnonymousCredentials is passed as a class (no parens); the
    # transport never exercises it here, but AnonymousCredentials() is likely
    # intended — confirm against the generator.
    transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get_iam_policy._get_unset_required_fields({})
    assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "region", "resource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_iam_policy_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks fire exactly once around get_iam_policy."""
    transport = transports.ServiceAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(),
    )
    client = ServiceAttachmentsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_get_iam_policy") as post, \
         mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_get_iam_policy") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.GetIamPolicyServiceAttachmentRequest.pb(compute.GetIamPolicyServiceAttachmentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Policy.to_json(compute.Policy())

        request = compute.GetIamPolicyServiceAttachmentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Policy()

        client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicyServiceAttachmentRequest):
    """An HTTP 400 surfaces as core_exceptions.BadRequest."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_iam_policy(request)


def test_get_iam_policy_rest_flattened():
    """Flattened args are merged into the request and hit the documented URI."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Policy()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            resource='resource_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Policy.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get_iam_policy(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/getIamPolicy" % client.transport._host, args[1])


def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_iam_policy(
            compute.GetIamPolicyServiceAttachmentRequest(),
            project='project_value',
            region='region_value',
            resource='resource_value',
        )


def test_get_iam_policy_rest_error():
    """Smoke test: a REST client can be constructed."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )
@pytest.mark.parametrize("request_type", [
    compute.InsertServiceAttachmentRequest,
    dict,
])
def test_insert_rest(request_type):
    """insert returns an ExtendedOperation populated from the REST response."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["service_attachment_resource"] = {'connected_endpoints': [{'consumer_network': 'consumer_network_value', 'endpoint': 'endpoint_value', 'psc_connection_id': 1793, 'status': 'status_value'}], 'connection_preference': 'connection_preference_value', 'consumer_accept_lists': [{'connection_limit': 1710, 'network_url': 'network_url_value', 'project_id_or_num': 'project_id_or_num_value'}], 'consumer_reject_lists': ['consumer_reject_lists_value1', 'consumer_reject_lists_value2'], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'domain_names': ['domain_names_value1', 'domain_names_value2'], 'enable_proxy_protocol': True, 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'nat_subnets': ['nat_subnets_value1', 'nat_subnets_value2'], 'producer_forwarding_rule': 'producer_forwarding_rule_value', 'psc_service_attachment_id': {'high': 416, 'low': 338}, 'reconcile_connections': True, 'region': 'region_value', 'self_link': 'self_link_value', 'target_service': 'target_service_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_insert_rest_required_fields(request_type=compute.InsertServiceAttachmentRequest):
    """Required fields (project, region) survive transcoding; defaults are dropped."""
    transport_class = transports.ServiceAttachmentsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_rest_unset_required_fields():
    """insert advertises the expected optional vs. required field sets."""
    # NOTE(review): AnonymousCredentials passed as a class (no parens) — see
    # note on the get_iam_policy variant; likely a generator artifact.
    transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "serviceAttachmentResource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks fire exactly once around insert."""
    transport = transports.ServiceAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(),
    )
    client = ServiceAttachmentsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_insert") as post, \
         mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertServiceAttachmentRequest.pb(compute.InsertServiceAttachmentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertServiceAttachmentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertServiceAttachmentRequest):
    """An HTTP 400 on insert surfaces as core_exceptions.BadRequest."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["service_attachment_resource"] = {'connected_endpoints': [{'consumer_network': 'consumer_network_value', 'endpoint': 'endpoint_value', 'psc_connection_id': 1793, 'status': 'status_value'}], 'connection_preference': 'connection_preference_value', 'consumer_accept_lists': [{'connection_limit': 1710, 'network_url': 'network_url_value', 'project_id_or_num': 'project_id_or_num_value'}], 'consumer_reject_lists': ['consumer_reject_lists_value1', 'consumer_reject_lists_value2'], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'domain_names': ['domain_names_value1', 'domain_names_value2'], 'enable_proxy_protocol': True, 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'nat_subnets': ['nat_subnets_value1', 'nat_subnets_value2'], 'producer_forwarding_rule': 'producer_forwarding_rule_value', 'psc_service_attachment_id': {'high': 416, 'low': 338}, 'reconcile_connections': True, 'region': 'region_value', 'self_link': 'self_link_value', 'target_service': 'target_service_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert(request)


def test_insert_rest_flattened():
    """Flattened args are merged into the request and hit the documented URI."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            service_attachment_resource=compute.ServiceAttachment(connected_endpoints=[compute.ServiceAttachmentConnectedEndpoint(consumer_network='consumer_network_value')]),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments" % client.transport._host, args[1])


def test_insert_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert(
            compute.InsertServiceAttachmentRequest(),
            project='project_value',
            region='region_value',
            service_attachment_resource=compute.ServiceAttachment(connected_endpoints=[compute.ServiceAttachmentConnectedEndpoint(consumer_network='consumer_network_value')]),
        )


def test_insert_rest_error():
    """Smoke test: a REST client can be constructed."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertServiceAttachmentRequest,
    dict,
])
def test_insert_unary_rest(request_type):
    """insert_unary returns a bare compute.Operation (no polling wrapper)."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["service_attachment_resource"] = {'connected_endpoints': [{'consumer_network': 'consumer_network_value', 'endpoint': 'endpoint_value', 'psc_connection_id': 1793, 'status': 'status_value'}], 'connection_preference': 'connection_preference_value', 'consumer_accept_lists': [{'connection_limit': 1710, 'network_url': 'network_url_value', 'project_id_or_num': 'project_id_or_num_value'}], 'consumer_reject_lists': ['consumer_reject_lists_value1', 'consumer_reject_lists_value2'], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'domain_names': ['domain_names_value1', 'domain_names_value2'], 'enable_proxy_protocol': True, 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'nat_subnets': ['nat_subnets_value1', 'nat_subnets_value2'], 'producer_forwarding_rule': 'producer_forwarding_rule_value', 'psc_service_attachment_id': {'high': 416, 'low': 338}, 'reconcile_connections': True, 'region': 'region_value', 'self_link': 'self_link_value', 'target_service': 'target_service_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
def test_insert_unary_rest_required_fields(request_type=compute.InsertServiceAttachmentRequest):
    """Required fields (project, region) survive transcoding for insert_unary."""
    transport_class = transports.ServiceAttachmentsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    # NOTE(review): probes transport.insert — the unary variant shares the
    # underlying REST method, presumably why the generator reuses it; confirm.
    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert_unary(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_unary_rest_unset_required_fields():
    """insert_unary advertises the expected optional vs. required field sets."""
    # NOTE(review): AnonymousCredentials passed as a class (no parens), and the
    # probe targets transport.insert — both look like generator artifacts; confirm.
    transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "serviceAttachmentResource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_unary_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks fire exactly once around insert_unary."""
    transport = transports.ServiceAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(),
    )
    client = ServiceAttachmentsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_insert") as post, \
         mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertServiceAttachmentRequest.pb(compute.InsertServiceAttachmentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertServiceAttachmentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertServiceAttachmentRequest):
    """An HTTP 400 on insert_unary surfaces as core_exceptions.BadRequest."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["service_attachment_resource"] = {'connected_endpoints': [{'consumer_network': 'consumer_network_value', 'endpoint': 'endpoint_value', 'psc_connection_id': 1793, 'status': 'status_value'}], 'connection_preference': 'connection_preference_value', 'consumer_accept_lists': [{'connection_limit': 1710, 'network_url': 'network_url_value', 'project_id_or_num': 'project_id_or_num_value'}], 'consumer_reject_lists': ['consumer_reject_lists_value1', 'consumer_reject_lists_value2'], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'domain_names': ['domain_names_value1', 'domain_names_value2'], 'enable_proxy_protocol': True, 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'nat_subnets': ['nat_subnets_value1', 'nat_subnets_value2'], 'producer_forwarding_rule': 'producer_forwarding_rule_value', 'psc_service_attachment_id': {'high': 416, 'low': 338}, 'reconcile_connections': True, 'region': 'region_value', 'self_link': 'self_link_value', 'target_service': 'target_service_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request)
def test_insert_unary_rest_flattened():
    """Flattened args are merged into the request and hit the documented URI."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            service_attachment_resource=compute.ServiceAttachment(connected_endpoints=[compute.ServiceAttachmentConnectedEndpoint(consumer_network='consumer_network_value')]),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments" % client.transport._host, args[1])


def test_insert_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert_unary(
            compute.InsertServiceAttachmentRequest(),
            project='project_value',
            region='region_value',
            service_attachment_resource=compute.ServiceAttachment(connected_endpoints=[compute.ServiceAttachmentConnectedEndpoint(consumer_network='consumer_network_value')]),
        )


def test_insert_unary_rest_error():
    """Smoke test: a REST client can be constructed."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.ListServiceAttachmentsRequest,
    dict,
])
def test_list_rest(request_type):
    """list returns a ListPager exposing the raw list-response fields."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.ServiceAttachmentList(
            id='id_value',
            kind='kind_value',
            next_page_token='next_page_token_value',
            self_link='self_link_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.ServiceAttachmentList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPager)
    assert response.id == 'id_value'
    assert response.kind == 'kind_value'
    assert response.next_page_token == 'next_page_token_value'
    assert response.self_link == 'self_link_value'
def test_list_rest_required_fields(request_type=compute.ListServiceAttachmentsRequest):
    """Required fields (project, region) survive transcoding for list."""
    transport_class = transports.ServiceAttachmentsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.ServiceAttachmentList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.ServiceAttachmentList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_rest_unset_required_fields():
    """list advertises the expected optional vs. required field sets."""
    # NOTE(review): AnonymousCredentials passed as a class (no parens) — see
    # note on the get_iam_policy variant; likely a generator artifact.
    transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks fire exactly once around list."""
    transport = transports.ServiceAttachmentsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(),
    )
    client = ServiceAttachmentsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_list") as post, \
         mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_list") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.ListServiceAttachmentsRequest.pb(compute.ListServiceAttachmentsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.ServiceAttachmentList.to_json(compute.ServiceAttachmentList())

        request = compute.ListServiceAttachmentsRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.ServiceAttachmentList()

        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListServiceAttachmentsRequest):
    """An HTTP 400 on list surfaces as core_exceptions.BadRequest."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list(request)


def test_list_rest_flattened():
    """Flattened args are merged into the request and hit the documented URI."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.ServiceAttachmentList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.ServiceAttachmentList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListServiceAttachmentsRequest(),
            project='project_value',
            region='region_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    """The pager walks all pages, yielding every item and each page token."""
    client = ServiceAttachmentsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.ServiceAttachmentList(
                items=[
                    compute.ServiceAttachment(),
                    compute.ServiceAttachment(),
                    compute.ServiceAttachment(),
                ],
                next_page_token='abc',
            ),
            compute.ServiceAttachmentList(
                items=[],
                next_page_token='def',
            ),
            compute.ServiceAttachmentList(
                items=[
                    compute.ServiceAttachment(),
                ],
                next_page_token='ghi',
            ),
            compute.ServiceAttachmentList(
                items=[
                    compute.ServiceAttachment(),
                    compute.ServiceAttachment(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(compute.ServiceAttachmentList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'project': 'sample1', 'region': 'sample2'}

        pager = client.list(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.ServiceAttachment)
                   for i in results)

        pages = list(client.list(request=sample_request).pages)
        # Final page has no next_page_token, hence the trailing ''.
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type",
+[
+    compute.PatchServiceAttachmentRequest,
+    dict,
+])
+def test_patch_rest(request_type):
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'}
+    request_init["service_attachment_resource"] = {'connected_endpoints': [{'consumer_network': 'consumer_network_value', 'endpoint': 'endpoint_value', 'psc_connection_id': 1793, 'status': 'status_value'}], 'connection_preference': 'connection_preference_value', 'consumer_accept_lists': [{'connection_limit': 1710, 'network_url': 'network_url_value', 'project_id_or_num': 'project_id_or_num_value'}], 'consumer_reject_lists': ['consumer_reject_lists_value1', 'consumer_reject_lists_value2'], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'domain_names': ['domain_names_value1', 'domain_names_value2'], 'enable_proxy_protocol': True, 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'nat_subnets': ['nat_subnets_value1', 'nat_subnets_value2'], 'producer_forwarding_rule': 'producer_forwarding_rule_value', 'psc_service_attachment_id': {'high': 416, 'low': 338}, 'reconcile_connections': True, 'region': 'region_value', 'self_link': 'self_link_value', 'target_service': 'target_service_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation(
+            client_operation_id='client_operation_id_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            end_time='end_time_value',
+            http_error_message='http_error_message_value',
+            http_error_status_code=2374,
+            id=205,
+            insert_time='insert_time_value',
+            kind='kind_value',
+            name='name_value',
+            operation_group_id='operation_group_id_value',
+            operation_type='operation_type_value',
+            progress=885,
+            region='region_value',
+            self_link='self_link_value',
+            start_time='start_time_value',
+            status=compute.Operation.Status.DONE,
+            status_message='status_message_value',
+            target_id=947,
+            target_link='target_link_value',
+            user='user_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.patch(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, extended_operation.ExtendedOperation)
+    assert response.client_operation_id == 'client_operation_id_value'
+    assert response.creation_timestamp == 'creation_timestamp_value'
+    assert response.description == 'description_value'
+    assert response.end_time == 'end_time_value'
+    assert response.http_error_message == 'http_error_message_value'
+    assert response.http_error_status_code == 2374
+    assert response.id == 205
+    assert response.insert_time == 'insert_time_value'
+    assert response.kind == 'kind_value'
+    assert response.name == 'name_value'
+    assert response.operation_group_id == 'operation_group_id_value'
+    assert response.operation_type == 'operation_type_value'
+    assert response.progress == 885
+    assert response.region == 'region_value'
+    assert response.self_link == 'self_link_value'
+    assert response.start_time == 'start_time_value'
+    assert response.status == compute.Operation.Status.DONE
+    assert response.status_message == 'status_message_value'
+    assert response.target_id == 947
+    assert response.target_link == 'target_link_value'
+    assert response.user == 'user_value'
+    assert response.zone == 'zone_value'
+
+
+def test_patch_rest_required_fields(request_type=compute.PatchServiceAttachmentRequest):
+    transport_class = transports.ServiceAttachmentsRestTransport
+
+    request_init = {}
+    request_init["project"] = ""
+    request_init["region"] = ""
+    request_init["service_attachment"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["region"] = 'region_value'
+    jsonified_request["serviceAttachment"] = 'service_attachment_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("request_id", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == 'region_value'
+    assert "serviceAttachment" in jsonified_request
+    assert jsonified_request["serviceAttachment"] == 'service_attachment_value'
+
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.patch(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_patch_rest_unset_required_fields():
+    # NOTE(review): instantiate the anonymous credentials (was passing the
+    # class object); matches every other constructor call in this file.
+    transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.patch._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "serviceAttachment", "serviceAttachmentResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_patch_rest_interceptors(null_interceptor):
+    transport = transports.ServiceAttachmentsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(),
+        )
+    client = ServiceAttachmentsClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_patch") as post, \
+        mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_patch") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.PatchServiceAttachmentRequest.pb(compute.PatchServiceAttachmentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params":
+                pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.PatchServiceAttachmentRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchServiceAttachmentRequest):
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'}
+    request_init["service_attachment_resource"] = {'connected_endpoints': [{'consumer_network': 'consumer_network_value', 'endpoint': 'endpoint_value', 'psc_connection_id': 1793, 'status': 'status_value'}], 'connection_preference': 'connection_preference_value', 'consumer_accept_lists': [{'connection_limit': 1710, 'network_url': 'network_url_value', 'project_id_or_num': 'project_id_or_num_value'}], 'consumer_reject_lists': ['consumer_reject_lists_value1', 'consumer_reject_lists_value2'], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'domain_names': ['domain_names_value1', 'domain_names_value2'], 'enable_proxy_protocol': True, 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'nat_subnets': ['nat_subnets_value1', 'nat_subnets_value2'], 'producer_forwarding_rule': 'producer_forwarding_rule_value', 'psc_service_attachment_id': {'high': 416, 'low': 338}, 'reconcile_connections': True, 'region': 'region_value', 'self_link': 'self_link_value', 'target_service': 'target_service_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.patch(request)
+
+
+def test_patch_rest_flattened():
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            region='region_value',
+            service_attachment='service_attachment_value',
+            service_attachment_resource=compute.ServiceAttachment(connected_endpoints=[compute.ServiceAttachmentConnectedEndpoint(consumer_network='consumer_network_value')]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.patch(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}" % client.transport._host, args[1])
+
+
+def test_patch_rest_flattened_error(transport: str = 'rest'):
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.patch(
+            compute.PatchServiceAttachmentRequest(),
+            project='project_value',
+            region='region_value',
+            service_attachment='service_attachment_value',
+            service_attachment_resource=compute.ServiceAttachment(connected_endpoints=[compute.ServiceAttachmentConnectedEndpoint(consumer_network='consumer_network_value')]),
+        )
+
+
+def test_patch_rest_error():
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.PatchServiceAttachmentRequest,
+    dict,
+])
+def test_patch_unary_rest(request_type):
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'}
+    request_init["service_attachment_resource"] = {'connected_endpoints': [{'consumer_network': 'consumer_network_value', 'endpoint': 'endpoint_value', 'psc_connection_id': 1793, 'status': 'status_value'}], 'connection_preference': 'connection_preference_value', 'consumer_accept_lists': [{'connection_limit': 1710, 'network_url': 'network_url_value', 'project_id_or_num': 'project_id_or_num_value'}], 'consumer_reject_lists': ['consumer_reject_lists_value1', 'consumer_reject_lists_value2'], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'domain_names': ['domain_names_value1', 'domain_names_value2'], 'enable_proxy_protocol': True, 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'nat_subnets': ['nat_subnets_value1', 'nat_subnets_value2'], 'producer_forwarding_rule': 'producer_forwarding_rule_value', 'psc_service_attachment_id': {'high': 416, 'low': 338}, 'reconcile_connections': True, 'region': 'region_value', 'self_link': 'self_link_value', 'target_service': 'target_service_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation(
+            client_operation_id='client_operation_id_value',
+            creation_timestamp='creation_timestamp_value',
+            description='description_value',
+            end_time='end_time_value',
+            http_error_message='http_error_message_value',
+            http_error_status_code=2374,
+            id=205,
+            insert_time='insert_time_value',
+            kind='kind_value',
+            name='name_value',
+            operation_group_id='operation_group_id_value',
+            operation_type='operation_type_value',
+            progress=885,
+            region='region_value',
+            self_link='self_link_value',
+            start_time='start_time_value',
+            status=compute.Operation.Status.DONE,
+            status_message='status_message_value',
+            target_id=947,
+            target_link='target_link_value',
+            user='user_value',
+            zone='zone_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.patch_unary(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, compute.Operation)
+
+
+def test_patch_unary_rest_required_fields(request_type=compute.PatchServiceAttachmentRequest):
+    transport_class = transports.ServiceAttachmentsRestTransport
+
+    request_init = {}
+    request_init["project"] = ""
+    request_init["region"] = ""
+    request_init["service_attachment"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["region"] = 'region_value'
+    jsonified_request["serviceAttachment"] = 'service_attachment_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("request_id", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == 'region_value'
+    assert "serviceAttachment" in jsonified_request
+    assert jsonified_request["serviceAttachment"] == 'service_attachment_value'
+
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Operation()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.patch_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_patch_unary_rest_unset_required_fields():
+    # NOTE(review): instantiate the anonymous credentials (was passing the
+    # class object); matches every other constructor call in this file.
+    transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.patch._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "serviceAttachment", "serviceAttachmentResource", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_patch_unary_rest_interceptors(null_interceptor):
+    transport = transports.ServiceAttachmentsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(),
+        )
+    client = ServiceAttachmentsClient(transport=transport)
+    with
mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_patch") as post, \
+        mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_patch") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.PatchServiceAttachmentRequest.pb(compute.PatchServiceAttachmentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.PatchServiceAttachmentRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        # The unary variant goes through the same transport method, so the
+        # same pre_patch/post_patch interceptor hooks must fire exactly once.
+        client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchServiceAttachmentRequest):
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'}
+    request_init["service_attachment_resource"] = {'connected_endpoints': [{'consumer_network': 'consumer_network_value', 'endpoint': 'endpoint_value', 'psc_connection_id': 1793, 'status': 'status_value'}], 'connection_preference': 'connection_preference_value', 'consumer_accept_lists': [{'connection_limit': 1710, 'network_url': 'network_url_value', 'project_id_or_num': 'project_id_or_num_value'}], 'consumer_reject_lists': ['consumer_reject_lists_value1', 'consumer_reject_lists_value2'], 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'domain_names': ['domain_names_value1', 'domain_names_value2'], 'enable_proxy_protocol': True, 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'nat_subnets': ['nat_subnets_value1', 'nat_subnets_value2'], 'producer_forwarding_rule': 'producer_forwarding_rule_value', 'psc_service_attachment_id': {'high': 416, 'low': 338}, 'reconcile_connections': True, 'region': 'region_value', 'self_link': 'self_link_value', 'target_service': 'target_service_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.patch_unary(request)
+
+
+def test_patch_unary_rest_flattened():
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Operation()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'project': 'sample1', 'region': 'sample2', 'service_attachment': 'sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project='project_value',
+            region='region_value',
+            service_attachment='service_attachment_value',
+            service_attachment_resource=compute.ServiceAttachment(connected_endpoints=[compute.ServiceAttachmentConnectedEndpoint(consumer_network='consumer_network_value')]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Operation.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.patch_unary(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}" % client.transport._host, args[1])
+
+
+def test_patch_unary_rest_flattened_error(transport: str = 'rest'):
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.patch_unary(
+            compute.PatchServiceAttachmentRequest(),
+            project='project_value',
+            region='region_value',
+            service_attachment='service_attachment_value',
+            service_attachment_resource=compute.ServiceAttachment(connected_endpoints=[compute.ServiceAttachmentConnectedEndpoint(consumer_network='consumer_network_value')]),
+        )
+
+
+def test_patch_unary_rest_error():
+    # Generated placeholder: only verifies that a REST client can be
+    # constructed for this method; no RPC is exercised here.
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    compute.SetIamPolicyServiceAttachmentRequest,
+    dict,
+])
+def test_set_iam_policy_rest(request_type):
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}
+    request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = compute.Policy(
+            etag='etag_value',
+            iam_owned=True,
+            version=774,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = compute.Policy.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.set_iam_policy(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, compute.Policy)
+    assert response.etag == 'etag_value'
+    assert response.iam_owned is True
+    assert response.version == 774
+
+
+def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicyServiceAttachmentRequest):
+    transport_class = transports.ServiceAttachmentsRestTransport
+
+    request_init = {}
+    request_init["project"] = ""
+    request_init["region"] = ""
+    request_init["resource"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["project"] = 'project_value'
+    jsonified_request["region"] = 'region_value'
+    jsonified_request["resource"] = 'resource_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == 'project_value'
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == 'region_value'
+    assert "resource" in jsonified_request
+    assert jsonified_request["resource"] == 'resource_value'
+
+    client = ServiceAttachmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = compute.Policy()
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "regionSetPolicyRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(), + ) + client = ServiceAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyServiceAttachmentRequest.pb(compute.SetIamPolicyServiceAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyServiceAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicyServiceAttachmentRequest): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 
'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyServiceAttachmentRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsServiceAttachmentRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsServiceAttachmentRequest): + transport_class = transports.ServiceAttachmentsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.ServiceAttachmentsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ServiceAttachmentsRestInterceptor(), + ) + client = 
ServiceAttachmentsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.ServiceAttachmentsRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsServiceAttachmentRequest.pb(compute.TestIamPermissionsServiceAttachmentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsServiceAttachmentRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsServiceAttachmentRequest): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsServiceAttachmentRequest(), + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceAttachmentsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceAttachmentsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ServiceAttachmentsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ServiceAttachmentsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ServiceAttachmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ServiceAttachmentsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ServiceAttachmentsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = ServiceAttachmentsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_service_attachments_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ServiceAttachmentsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_service_attachments_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.service_attachments.transports.ServiceAttachmentsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ServiceAttachmentsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'patch', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_service_attachments_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.service_attachments.transports.ServiceAttachmentsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceAttachmentsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_service_attachments_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.service_attachments.transports.ServiceAttachmentsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ServiceAttachmentsTransport() + adc.assert_called_once() + + +def test_service_attachments_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ServiceAttachmentsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_service_attachments_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ServiceAttachmentsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_service_attachments_host_no_port(transport_name): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_service_attachments_host_with_port(transport_name): + client = ServiceAttachmentsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_service_attachments_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ServiceAttachmentsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ServiceAttachmentsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + 
billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ServiceAttachmentsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ServiceAttachmentsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceAttachmentsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ServiceAttachmentsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ServiceAttachmentsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceAttachmentsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ServiceAttachmentsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ServiceAttachmentsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceAttachmentsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ServiceAttachmentsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ServiceAttachmentsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ServiceAttachmentsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ServiceAttachmentsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ServiceAttachmentsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ServiceAttachmentsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ServiceAttachmentsTransport, '_prep_wrapped_messages') as prep: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ServiceAttachmentsTransport, '_prep_wrapped_messages') as prep: + transport_class = ServiceAttachmentsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ServiceAttachmentsClient, transports.ServiceAttachmentsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_snapshots.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_snapshots.py new file mode 100644 index 000000000..cb6332e9a --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_snapshots.py @@ -0,0 +1,3807 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.snapshots import SnapshotsClient +from google.cloud.compute_v1.services.snapshots import pagers +from google.cloud.compute_v1.services.snapshots import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SnapshotsClient._get_default_mtls_endpoint(None) is None + assert SnapshotsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SnapshotsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SnapshotsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SnapshotsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SnapshotsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SnapshotsClient, "rest"), +]) +def test_snapshots_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SnapshotsRestTransport, "rest"), +]) +def test_snapshots_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds 
= service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SnapshotsClient, "rest"), +]) +def test_snapshots_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_snapshots_client_get_transport_class(): + transport = SnapshotsClient.get_transport_class() + available_transports = [ + transports.SnapshotsRestTransport, + ] + assert transport in available_transports + + transport = SnapshotsClient.get_transport_class("rest") + assert transport == transports.SnapshotsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SnapshotsClient, transports.SnapshotsRestTransport, "rest"), +]) +@mock.patch.object(SnapshotsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsClient)) +def test_snapshots_client_client_options(client_class, transport_class, 
transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SnapshotsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SnapshotsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SnapshotsClient, transports.SnapshotsRestTransport, "rest", "true"), + (SnapshotsClient, transports.SnapshotsRestTransport, "rest", "false"), +]) +@mock.patch.object(SnapshotsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_snapshots_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + SnapshotsClient +]) +@mock.patch.object(SnapshotsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsClient)) +def test_snapshots_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SnapshotsClient, transports.SnapshotsRestTransport, "rest"), +]) +def test_snapshots_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SnapshotsClient, transports.SnapshotsRestTransport, "rest", None), +]) +def test_snapshots_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSnapshotRequest, + dict, +]) +def test_delete_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'snapshot': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteSnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["snapshot"] = 'snapshot_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == 'snapshot_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "snapshot", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSnapshotRequest.pb(compute.DeleteSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'snapshot': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'snapshot': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + snapshot='snapshot_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots/{snapshot}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteSnapshotRequest(), + project='project_value', + snapshot='snapshot_value', + ) + + +def test_delete_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSnapshotRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'snapshot': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteSnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["snapshot"] = 'snapshot_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == 'snapshot_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "snapshot", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.SnapshotsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSnapshotRequest.pb(compute.DeleteSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'snapshot': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'snapshot': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + snapshot='snapshot_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots/{snapshot}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteSnapshotRequest(), + project='project_value', + snapshot='snapshot_value', + ) + + +def test_delete_unary_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetSnapshotRequest, + dict, +]) +def test_get_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'snapshot': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Snapshot( + architecture='architecture_value', + auto_created=True, + chain_name='chain_name_value', + creation_size_bytes=2037, + creation_timestamp='creation_timestamp_value', + description='description_value', + disk_size_gb=1261, + download_bytes=1502, + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + license_codes=[1360], + licenses=['licenses_value'], + location_hint='location_hint_value', + name='name_value', + satisfies_pzs=True, + self_link='self_link_value', + snapshot_type='snapshot_type_value', + source_disk='source_disk_value', + source_disk_id='source_disk_id_value', + source_snapshot_schedule_policy='source_snapshot_schedule_policy_value', + source_snapshot_schedule_policy_id='source_snapshot_schedule_policy_id_value', + status='status_value', + storage_bytes=1403, + storage_bytes_status='storage_bytes_status_value', + storage_locations=['storage_locations_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = 
compute.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Snapshot) + assert response.architecture == 'architecture_value' + assert response.auto_created is True + assert response.chain_name == 'chain_name_value' + assert response.creation_size_bytes == 2037 + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.disk_size_gb == 1261 + assert response.download_bytes == 1502 + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.license_codes == [1360] + assert response.licenses == ['licenses_value'] + assert response.location_hint == 'location_hint_value' + assert response.name == 'name_value' + assert response.satisfies_pzs is True + assert response.self_link == 'self_link_value' + assert response.snapshot_type == 'snapshot_type_value' + assert response.source_disk == 'source_disk_value' + assert response.source_disk_id == 'source_disk_id_value' + assert response.source_snapshot_schedule_policy == 'source_snapshot_schedule_policy_value' + assert response.source_snapshot_schedule_policy_id == 'source_snapshot_schedule_policy_id_value' + assert response.status == 'status_value' + assert response.storage_bytes == 1403 + assert response.storage_bytes_status == 'storage_bytes_status_value' + assert response.storage_locations == ['storage_locations_value'] + + +def test_get_rest_required_fields(request_type=compute.GetSnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = 
request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["snapshot"] = 'snapshot_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == 'snapshot_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Snapshot() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "snapshot", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetSnapshotRequest.pb(compute.GetSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Snapshot.to_json(compute.Snapshot()) + + request = compute.GetSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Snapshot() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetSnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'snapshot': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'snapshot': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + snapshot='snapshot_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots/{snapshot}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetSnapshotRequest(), + project='project_value', + snapshot='snapshot_value', + ) + + +def test_get_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetIamPolicySnapshotRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicySnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicySnapshotRequest.pb(compute.GetIamPolicySnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicySnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicySnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots/{resource}/getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicySnapshotRequest(), + project='project_value', + resource='resource_value', + ) + + +def test_get_iam_policy_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertSnapshotRequest, + dict, +]) +def test_insert_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the 
http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertSnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "snapshotResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertSnapshotRequest.pb(compute.InsertSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 
'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertSnapshotRequest(), + project='project_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + + +def test_insert_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertSnapshotRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 
'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertSnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "snapshotResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertSnapshotRequest.pb(compute.InsertSnapshotRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["snapshot_resource"] = {'architecture': 'architecture_value', 'auto_created': True, 'chain_name': 'chain_name_value', 'creation_size_bytes': 2037, 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'disk_size_gb': 1261, 'download_bytes': 1502, 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'license_codes': [1361, 1362], 'licenses': ['licenses_value1', 'licenses_value2'], 'location_hint': 'location_hint_value', 'name': 'name_value', 'satisfies_pzs': True, 'self_link': 'self_link_value', 'snapshot_encryption_key': {'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value', 'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'sha256': 'sha256_value'}, 'snapshot_type': 'snapshot_type_value', 'source_disk': 'source_disk_value', 'source_disk_encryption_key': {}, 'source_disk_id': 'source_disk_id_value', 'source_snapshot_schedule_policy': 
'source_snapshot_schedule_policy_value', 'source_snapshot_schedule_policy_id': 'source_snapshot_schedule_policy_id_value', 'status': 'status_value', 'storage_bytes': 1403, 'storage_bytes_status': 'storage_bytes_status_value', 'storage_locations': ['storage_locations_value1', 'storage_locations_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertSnapshotRequest(), + project='project_value', + snapshot_resource=compute.Snapshot(architecture='architecture_value'), + ) + + +def test_insert_unary_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListSnapshotsRequest, + dict, +]) +def test_list_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListSnapshotsRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_list") as post, \ + 
mock.patch.object(transports.SnapshotsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListSnapshotsRequest.pb(compute.ListSnapshotsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SnapshotList.to_json(compute.SnapshotList()) + + request = compute.ListSnapshotsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SnapshotList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListSnapshotsRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SnapshotList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListSnapshotsRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SnapshotList( + items=[ + compute.Snapshot(), + compute.Snapshot(), + compute.Snapshot(), + ], + next_page_token='abc', + ), + compute.SnapshotList( + items=[], + next_page_token='def', + ), + compute.SnapshotList( + items=[ + compute.Snapshot(), + ], + next_page_token='ghi', + ), + compute.SnapshotList( + items=[ + compute.Snapshot(), + compute.Snapshot(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SnapshotList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Snapshot) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicySnapshotRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 
'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicySnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetPolicyRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, 
"post_set_iam_policy") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicySnapshotRequest.pb(compute.SetIamPolicySnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicySnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicySnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 
'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicySnapshotRequest(), + project='project_value', + resource='resource_value', + global_set_policy_request_resource=compute.GlobalSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsSnapshotRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsSnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsSnapshotRequest.pb(compute.SetLabelsSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsSnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsSnapshotRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsSnapshotRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsSnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("globalSetLabelsRequestResource", "project", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetLabelsSnapshotRequest.pb(compute.SetLabelsSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsSnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["global_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsSnapshotRequest(), + project='project_value', + resource='resource_value', + global_set_labels_request_resource=compute.GlobalSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsSnapshotRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsSnapshotRequest): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "resource", "testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.SnapshotsRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.SnapshotsRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsSnapshotRequest.pb(compute.TestIamPermissionsSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsSnapshotRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsSnapshotRequest): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'resource': 'sample2'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'resource': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/snapshots/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsSnapshotRequest(), + project='project_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SnapshotsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SnapshotsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SnapshotsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SnapshotsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SnapshotsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.SnapshotsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_transport_kind(transport_name):
+    # The transport's reported kind should match the name it was
+    # registered under in get_transport_class().
+    transport = SnapshotsClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_snapshots_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.SnapshotsTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_snapshots_base_transport():
+    # Instantiate the base transport.
+    # __init__ is mocked out so the abstract base can be constructed
+    # without a concrete implementation.
+    with mock.patch('google.cloud.compute_v1.services.snapshots.transports.SnapshotsTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.SnapshotsTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+ methods = ( + 'delete', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'set_iam_policy', + 'set_labels', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_snapshots_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.snapshots.transports.SnapshotsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SnapshotsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_snapshots_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.snapshots.transports.SnapshotsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SnapshotsTransport() + adc.assert_called_once() + + +def test_snapshots_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SnapshotsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_snapshots_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.SnapshotsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_snapshots_host_no_port(transport_name): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_snapshots_host_with_port(transport_name): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_snapshots_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = 
SnapshotsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SnapshotsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SnapshotsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SnapshotsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+    actual = SnapshotsClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    # Building a folder resource path from its single component.
+    folder = "whelk"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = SnapshotsClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = SnapshotsClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SnapshotsClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    # Building an organization resource path from its single component.
+    organization = "oyster"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = SnapshotsClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = SnapshotsClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SnapshotsClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    # Building a project resource path from its single component.
+    project = "cuttlefish"
+    expected = "projects/{project}".format(project=project, )
+    actual = SnapshotsClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = SnapshotsClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = SnapshotsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SnapshotsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SnapshotsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SnapshotsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SnapshotsTransport, '_prep_wrapped_messages') as prep: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SnapshotsTransport, '_prep_wrapped_messages') as prep: + transport_class = SnapshotsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SnapshotsClient, transports.SnapshotsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_ssl_certificates.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_ssl_certificates.py new file mode 100644 index 000000000..686e80e3a --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_ssl_certificates.py @@ -0,0 +1,2792 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.ssl_certificates import SslCertificatesClient +from google.cloud.compute_v1.services.ssl_certificates import pagers +from google.cloud.compute_v1.services.ssl_certificates import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client):
+    # Substitute a non-localhost endpoint so the client's mTLS-endpoint
+    # derivation can be exercised during tests.
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    # _get_default_mtls_endpoint should insert ".mtls" for googleapis.com
+    # hosts (plain and sandbox), pass None and non-googleapis hosts through,
+    # and leave already-mtls endpoints unchanged.
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert SslCertificatesClient._get_default_mtls_endpoint(None) is None
+    assert SslCertificatesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert SslCertificatesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert SslCertificatesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert SslCertificatesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert SslCertificatesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (SslCertificatesClient, "rest"),
+])
+def test_ssl_certificates_client_from_service_account_info(client_class, transport_name):
+    # from_service_account_info should thread the parsed credentials into
+    # the transport and pick the REST host (https scheme, no :443 suffix).
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'compute.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://compute.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.SslCertificatesRestTransport, "rest"),
+])
+def test_ssl_certificates_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials,
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SslCertificatesClient, "rest"), +]) +def test_ssl_certificates_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_ssl_certificates_client_get_transport_class(): + transport = SslCertificatesClient.get_transport_class() + available_transports = [ + transports.SslCertificatesRestTransport, + ] + assert transport in available_transports + + transport = SslCertificatesClient.get_transport_class("rest") + assert transport == transports.SslCertificatesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SslCertificatesClient, transports.SslCertificatesRestTransport, "rest"), +]) +@mock.patch.object(SslCertificatesClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(SslCertificatesClient)) +def test_ssl_certificates_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SslCertificatesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SslCertificatesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SslCertificatesClient, transports.SslCertificatesRestTransport, "rest", "true"), + (SslCertificatesClient, transports.SslCertificatesRestTransport, "rest", "false"), +]) +@mock.patch.object(SslCertificatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SslCertificatesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_ssl_certificates_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + SslCertificatesClient +]) +@mock.patch.object(SslCertificatesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SslCertificatesClient)) +def test_ssl_certificates_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SslCertificatesClient, transports.SslCertificatesRestTransport, "rest"), +]) +def test_ssl_certificates_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SslCertificatesClient, transports.SslCertificatesRestTransport, "rest", None), +]) +def test_ssl_certificates_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListSslCertificatesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificateAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListSslCertificatesRequest): + transport_class = transports.SslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificateAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.SslCertificateAggregatedList.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.aggregated_list(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_aggregated_list_rest_unset_required_fields():
+    transport = transports.SslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.aggregated_list._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_aggregated_list_rest_interceptors(null_interceptor):
+    transport = transports.SslCertificatesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SslCertificatesRestInterceptor(),
+        )
+    client = SslCertificatesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.SslCertificatesRestInterceptor, "post_aggregated_list") as post, \
+        mock.patch.object(transports.SslCertificatesRestInterceptor, "pre_aggregated_list") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.AggregatedListSslCertificatesRequest.pb(compute.AggregatedListSslCertificatesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.SslCertificateAggregatedList.to_json(compute.SslCertificateAggregatedList())
+
+        request = compute.AggregatedListSslCertificatesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.SslCertificateAggregatedList()
+
+        client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListSslCertificatesRequest):
+    client = SslCertificatesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.aggregated_list(request)
+
+
+def test_aggregated_list_rest_flattened():
+    client = SslCertificatesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.SslCertificateAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificateAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/sslCertificates" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListSslCertificatesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SslCertificateAggregatedList( + items={ + 'a':compute.SslCertificatesScopedList(), + 'b':compute.SslCertificatesScopedList(), + 'c':compute.SslCertificatesScopedList(), + }, + next_page_token='abc', + ), + compute.SslCertificateAggregatedList( + items={}, + next_page_token='def', + ), + compute.SslCertificateAggregatedList( + items={ + 'g':compute.SslCertificatesScopedList(), + }, + next_page_token='ghi', + ), + compute.SslCertificateAggregatedList( + items={ + 'h':compute.SslCertificatesScopedList(), + 'i':compute.SslCertificatesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SslCertificateAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.SslCertificatesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.SslCertificatesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.SslCertificatesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSslCertificateRequest, + dict, +]) +def 
test_delete_rest(request_type): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_certificate': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteSslCertificateRequest): + transport_class = transports.SslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["ssl_certificate"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["sslCertificate"] = 'ssl_certificate_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "sslCertificate" in jsonified_request + assert jsonified_request["sslCertificate"] == 'ssl_certificate_value' + + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_rest_unset_required_fields():
+    transport = transports.SslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslCertificate", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_rest_interceptors(null_interceptor):
+    transport = transports.SslCertificatesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SslCertificatesRestInterceptor(),
+        )
+    client = SslCertificatesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.SslCertificatesRestInterceptor, "post_delete") as post, \
+        mock.patch.object(transports.SslCertificatesRestInterceptor, "pre_delete") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteSslCertificateRequest.pb(compute.DeleteSslCertificateRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.DeleteSslCertificateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSslCertificateRequest):
+    client = SslCertificatesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'ssl_certificate': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete(request)
+
+
+def test_delete_rest_flattened():
+    client = SslCertificatesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'ssl_certificate': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_certificate='ssl_certificate_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteSslCertificateRequest(), + project='project_value', + ssl_certificate='ssl_certificate_value', + ) + + +def test_delete_rest_error(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSslCertificateRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_certificate': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = 
response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteSslCertificateRequest): + transport_class = transports.SslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["ssl_certificate"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["sslCertificate"] = 'ssl_certificate_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "sslCertificate" in jsonified_request + assert jsonified_request["sslCertificate"] == 'ssl_certificate_value' + + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_unary_rest_unset_required_fields():
+    transport = transports.SslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslCertificate", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_unary_rest_interceptors(null_interceptor):
+    transport = transports.SslCertificatesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SslCertificatesRestInterceptor(),
+        )
+    client = SslCertificatesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.SslCertificatesRestInterceptor, "post_delete") as post, \
+        mock.patch.object(transports.SslCertificatesRestInterceptor, "pre_delete") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.DeleteSslCertificateRequest.pb(compute.DeleteSslCertificateRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.DeleteSslCertificateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSslCertificateRequest):
+    client = SslCertificatesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'ssl_certificate': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_unary(request)
+
+
+def test_delete_unary_rest_flattened():
+    client = SslCertificatesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'ssl_certificate': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_certificate='ssl_certificate_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteSslCertificateRequest(), + project='project_value', + ssl_certificate='ssl_certificate_value', + ) + + +def test_delete_unary_rest_error(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetSslCertificateRequest, + dict, +]) +def test_get_rest(request_type): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_certificate': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificate( + certificate='certificate_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + expire_time='expire_time_value', + id=205, + kind='kind_value', + name='name_value', + private_key='private_key_value', + region='region_value', + self_link='self_link_value', + subject_alternative_names=['subject_alternative_names_value'], + type_='type__value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SslCertificate) + assert response.certificate == 'certificate_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.expire_time == 'expire_time_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.private_key == 'private_key_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.subject_alternative_names == ['subject_alternative_names_value'] + assert response.type_ == 'type__value' + + +def test_get_rest_required_fields(request_type=compute.GetSslCertificateRequest): + transport_class = transports.SslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["ssl_certificate"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["sslCertificate"] = 'ssl_certificate_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "sslCertificate" in jsonified_request + assert jsonified_request["sslCertificate"] == 'ssl_certificate_value' + + 
client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.SslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "sslCertificate", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslCertificatesRestInterceptor(), + ) + client = 
SslCertificatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslCertificatesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.SslCertificatesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetSslCertificateRequest.pb(compute.GetSslCertificateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslCertificate.to_json(compute.SslCertificate()) + + request = compute.GetSslCertificateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslCertificate() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetSslCertificateRequest): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_certificate': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificate() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'ssl_certificate': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_certificate='ssl_certificate_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetSslCertificateRequest(), + project='project_value', + ssl_certificate='ssl_certificate_value', + ) + + +def test_get_rest_error(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertSslCertificateRequest, + dict, +]) +def test_insert_rest(request_type): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["ssl_certificate_resource"] = {'certificate': 'certificate_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'expire_time': 'expire_time_value', 'id': 205, 'kind': 'kind_value', 'managed': {'domain_status': {}, 'domains': ['domains_value1', 'domains_value2'], 'status': 'status_value'}, 'name': 'name_value', 'private_key': 'private_key_value', 'region': 'region_value', 'self_link': 'self_link_value', 'self_managed': {'certificate': 'certificate_value', 'private_key': 'private_key_value'}, 'subject_alternative_names': ['subject_alternative_names_value1', 'subject_alternative_names_value2'], 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertSslCertificateRequest): + transport_class = transports.SslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.SslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslCertificateResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslCertificatesRestInterceptor(), + ) + client = SslCertificatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslCertificatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.SslCertificatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertSslCertificateRequest.pb(compute.InsertSslCertificateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSslCertificateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSslCertificateRequest): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["ssl_certificate_resource"] = {'certificate': 'certificate_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'expire_time': 'expire_time_value', 'id': 205, 'kind': 'kind_value', 'managed': {'domain_status': {}, 'domains': ['domains_value1', 'domains_value2'], 'status': 'status_value'}, 'name': 'name_value', 'private_key': 'private_key_value', 'region': 'region_value', 'self_link': 'self_link_value', 'self_managed': {'certificate': 'certificate_value', 'private_key': 'private_key_value'}, 'subject_alternative_names': ['subject_alternative_names_value1', 'subject_alternative_names_value2'], 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_certificate_resource=compute.SslCertificate(certificate='certificate_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslCertificates" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertSslCertificateRequest(), + project='project_value', + ssl_certificate_resource=compute.SslCertificate(certificate='certificate_value'), + ) + + +def test_insert_rest_error(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertSslCertificateRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["ssl_certificate_resource"] = {'certificate': 'certificate_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'expire_time': 'expire_time_value', 'id': 205, 'kind': 'kind_value', 'managed': {'domain_status': {}, 'domains': ['domains_value1', 'domains_value2'], 'status': 'status_value'}, 'name': 'name_value', 'private_key': 'private_key_value', 'region': 'region_value', 'self_link': 'self_link_value', 'self_managed': {'certificate': 'certificate_value', 'private_key': 'private_key_value'}, 'subject_alternative_names': ['subject_alternative_names_value1', 'subject_alternative_names_value2'], 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertSslCertificateRequest): + transport_class = transports.SslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.SslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslCertificateResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslCertificatesRestInterceptor(), + ) + client = SslCertificatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslCertificatesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.SslCertificatesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertSslCertificateRequest.pb(compute.InsertSslCertificateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSslCertificateRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSslCertificateRequest): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["ssl_certificate_resource"] = {'certificate': 'certificate_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'expire_time': 'expire_time_value', 'id': 205, 'kind': 'kind_value', 'managed': {'domain_status': {}, 'domains': ['domains_value1', 'domains_value2'], 'status': 'status_value'}, 'name': 'name_value', 'private_key': 'private_key_value', 'region': 'region_value', 'self_link': 'self_link_value', 'self_managed': {'certificate': 'certificate_value', 'private_key': 'private_key_value'}, 'subject_alternative_names': ['subject_alternative_names_value1', 'subject_alternative_names_value2'], 'type_': 'type__value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_certificate_resource=compute.SslCertificate(certificate='certificate_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslCertificates" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertSslCertificateRequest(), + project='project_value', + ssl_certificate_resource=compute.SslCertificate(certificate='certificate_value'), + ) + + +def test_insert_unary_rest_error(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListSslCertificatesRequest, + dict, +]) +def test_list_rest(request_type): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificateList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListSslCertificatesRequest): + transport_class = transports.SslCertificatesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificateList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.SslCertificatesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslCertificatesRestInterceptor(), + ) + client = SslCertificatesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslCertificatesRestInterceptor, "post_list") as 
post, \ + mock.patch.object(transports.SslCertificatesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListSslCertificatesRequest.pb(compute.ListSslCertificatesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslCertificateList.to_json(compute.SslCertificateList()) + + request = compute.ListSslCertificatesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslCertificateList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListSslCertificatesRequest): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslCertificateList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslCertificateList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslCertificates" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListSslCertificatesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SslCertificateList( + items=[ + compute.SslCertificate(), + compute.SslCertificate(), + compute.SslCertificate(), + ], + next_page_token='abc', + ), + compute.SslCertificateList( + items=[], + next_page_token='def', + ), + compute.SslCertificateList( + items=[ + compute.SslCertificate(), + ], + next_page_token='ghi', + ), + compute.SslCertificateList( + items=[ + compute.SslCertificate(), + compute.SslCertificate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SslCertificateList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.SslCertificate) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SslCertificatesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SslCertificatesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SslCertificatesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SslCertificatesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SslCertificatesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SslCertificatesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.SslCertificatesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = SslCertificatesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_ssl_certificates_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SslCertificatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_ssl_certificates_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.ssl_certificates.transports.SslCertificatesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.SslCertificatesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_ssl_certificates_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.ssl_certificates.transports.SslCertificatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SslCertificatesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_ssl_certificates_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.ssl_certificates.transports.SslCertificatesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SslCertificatesTransport() + adc.assert_called_once() + + +def test_ssl_certificates_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SslCertificatesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_ssl_certificates_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.SslCertificatesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_ssl_certificates_host_no_port(transport_name): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_ssl_certificates_host_with_port(transport_name): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_ssl_certificates_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 
= ga_credentials.AnonymousCredentials() + client1 = SslCertificatesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SslCertificatesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SslCertificatesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SslCertificatesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SslCertificatesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = SslCertificatesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SslCertificatesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SslCertificatesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SslCertificatesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SslCertificatesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SslCertificatesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = SslCertificatesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SslCertificatesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SslCertificatesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SslCertificatesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SslCertificatesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SslCertificatesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SslCertificatesTransport, '_prep_wrapped_messages') as prep: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SslCertificatesTransport, '_prep_wrapped_messages') as prep: + transport_class = SslCertificatesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SslCertificatesClient, transports.SslCertificatesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_ssl_policies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_ssl_policies.py new file mode 100644 index 000000000..f534ce229 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_ssl_policies.py @@ -0,0 +1,3588 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.ssl_policies import SslPoliciesClient +from google.cloud.compute_v1.services.ssl_policies import pagers +from google.cloud.compute_v1.services.ssl_policies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint(client):
    # Return a detectably different endpoint when the client's default targets
    # localhost, so endpoint-selection tests can observe the mTLS switch;
    # otherwise pass the real default endpoint through unchanged.
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps *.googleapis.com (and sandbox) hosts to
    their .mtls. variants, is idempotent on already-mTLS hosts, and leaves
    None and non-Google hosts untouched."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert SslPoliciesClient._get_default_mtls_endpoint(None) is None
    assert SslPoliciesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert SslPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert SslPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert SslPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert SslPoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (SslPoliciesClient, "rest"),
])
def test_ssl_policies_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info must hand the factory-produced credentials to
    the transport and point it at the REST host URL."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # REST transports carry a scheme-prefixed host; gRPC would carry
        # host:port (kept for parity with sibling generated test modules).
        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.SslPoliciesRestTransport, "rest"),
])
def test_ssl_policies_client_service_account_always_use_jwt(transport_class, transport_name):
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access',
create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SslPoliciesClient, "rest"), +]) +def test_ssl_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_ssl_policies_client_get_transport_class(): + transport = SslPoliciesClient.get_transport_class() + available_transports = [ + transports.SslPoliciesRestTransport, + ] + assert transport in available_transports + + transport = SslPoliciesClient.get_transport_class("rest") + assert transport == transports.SslPoliciesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SslPoliciesClient, transports.SslPoliciesRestTransport, "rest"), +]) +@mock.patch.object(SslPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SslPoliciesClient)) +def 
test_ssl_policies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SslPoliciesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SslPoliciesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SslPoliciesClient, transports.SslPoliciesRestTransport, "rest", "true"), + (SslPoliciesClient, transports.SslPoliciesRestTransport, "rest", "false"), +]) +@mock.patch.object(SslPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SslPoliciesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_ssl_policies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + SslPoliciesClient +]) +@mock.patch.object(SslPoliciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SslPoliciesClient)) +def test_ssl_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SslPoliciesClient, transports.SslPoliciesRestTransport, "rest"), +]) +def test_ssl_policies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SslPoliciesClient, transports.SslPoliciesRestTransport, "rest", None), +]) +def test_ssl_policies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListSslPoliciesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslPoliciesAggregatedList( + etag='etag_value', + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.etag == 'etag_value' + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListSslPoliciesRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslPoliciesAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListSslPoliciesRequest.pb(compute.AggregatedListSslPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslPoliciesAggregatedList.to_json(compute.SslPoliciesAggregatedList()) + + request = compute.AggregatedListSslPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslPoliciesAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListSslPoliciesRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslPoliciesAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/sslPolicies" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListSslPoliciesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SslPoliciesAggregatedList( + items={ + 'a':compute.SslPoliciesScopedList(), + 'b':compute.SslPoliciesScopedList(), + 'c':compute.SslPoliciesScopedList(), + }, + next_page_token='abc', + ), + compute.SslPoliciesAggregatedList( + items={}, + next_page_token='def', + ), + compute.SslPoliciesAggregatedList( + items={ + 'g':compute.SslPoliciesScopedList(), + }, + next_page_token='ghi', + ), + compute.SslPoliciesAggregatedList( + items={ + 'h':compute.SslPoliciesScopedList(), + 'i':compute.SslPoliciesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SslPoliciesAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.SslPoliciesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.SslPoliciesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.SslPoliciesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSslPolicyRequest, + dict, +]) +def test_delete_rest(request_type): + client = SslPoliciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteSslPolicyRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSslPolicyRequest.pb(compute.DeleteSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSslPolicyRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'ssl_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_policy='ssl_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteSslPolicyRequest(), + project='project_value', + ssl_policy='ssl_policy_value', + ) + + +def test_delete_rest_error(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSslPolicyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = 
client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteSslPolicyRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSslPolicyRequest.pb(compute.DeleteSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSslPolicyRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'ssl_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_policy='ssl_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteSslPolicyRequest(), + project='project_value', + ssl_policy='ssl_policy_value', + ) + + +def test_delete_unary_rest_error(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetSslPolicyRequest, + dict, +]) +def test_get_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslPolicy( + creation_timestamp='creation_timestamp_value', + custom_features=['custom_features_value'], + description='description_value', + enabled_features=['enabled_features_value'], + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + min_tls_version='min_tls_version_value', + name='name_value', + profile='profile_value', + region='region_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SslPolicy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.custom_features == ['custom_features_value'] + assert response.description == 'description_value' + assert response.enabled_features == ['enabled_features_value'] + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.min_tls_version == 'min_tls_version_value' + assert response.name == 'name_value' + assert response.profile == 'profile_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetSslPolicyRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = SslPoliciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "sslPolicy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetSslPolicyRequest.pb(compute.GetSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslPolicy.to_json(compute.SslPolicy()) + + request = compute.GetSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslPolicy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetSslPolicyRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'ssl_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_policy='ssl_policy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetSslPolicyRequest(), + project='project_value', + ssl_policy='ssl_policy_value', + ) + + +def test_get_rest_error(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertSslPolicyRequest, + dict, +]) +def test_insert_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertSslPolicyRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertSslPolicyRequest.pb(compute.InsertSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSslPolicyRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertSslPolicyRequest(), + project='project_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertSslPolicyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertSslPolicyRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertSslPolicyRequest.pb(compute.InsertSslPolicyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSslPolicyRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertSslPolicyRequest(), + project='project_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListSslPoliciesRequest, + dict, +]) +def test_list_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SslPoliciesList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListSslPoliciesRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslPoliciesList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_list") as post, \ + 
mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListSslPoliciesRequest.pb(compute.ListSslPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslPoliciesList.to_json(compute.SslPoliciesList()) + + request = compute.ListSslPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslPoliciesList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListSslPoliciesRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslPoliciesList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListSslPoliciesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SslPoliciesList( + items=[ + compute.SslPolicy(), + compute.SslPolicy(), + compute.SslPolicy(), + ], + next_page_token='abc', + ), + compute.SslPoliciesList( + items=[], + next_page_token='def', + ), + compute.SslPoliciesList( + items=[ + compute.SslPolicy(), + ], + next_page_token='ghi', + ), + compute.SslPoliciesList( + items=[ + compute.SslPolicy(), + compute.SslPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SslPoliciesList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.SslPolicy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListAvailableFeaturesSslPoliciesRequest, + dict, +]) +def test_list_available_features_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslPoliciesListAvailableFeaturesResponse( + features=['features_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_available_features(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.SslPoliciesListAvailableFeaturesResponse) + assert response.features == ['features_value'] + + +def test_list_available_features_rest_required_fields(request_type=compute.ListAvailableFeaturesSslPoliciesRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_available_features._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_available_features._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SslPoliciesListAvailableFeaturesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_available_features(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_available_features_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_available_features._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_available_features_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_list_available_features") as post, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_list_available_features") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListAvailableFeaturesSslPoliciesRequest.pb(compute.ListAvailableFeaturesSslPoliciesRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SslPoliciesListAvailableFeaturesResponse.to_json(compute.SslPoliciesListAvailableFeaturesResponse()) + + request = compute.ListAvailableFeaturesSslPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SslPoliciesListAvailableFeaturesResponse() + + client.list_available_features(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_available_features_rest_bad_request(transport: str = 'rest', request_type=compute.ListAvailableFeaturesSslPoliciesRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_available_features(request) + + +def test_list_available_features_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SslPoliciesListAvailableFeaturesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SslPoliciesListAvailableFeaturesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_available_features(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies/listAvailableFeatures" % client.transport._host, args[1]) + + +def test_list_available_features_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_available_features( + compute.ListAvailableFeaturesSslPoliciesRequest(), + project='project_value', + ) + + +def test_list_available_features_rest_error(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchSslPolicyRequest, + dict, +]) +def test_patch_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchSslPolicyRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicy", "sslPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchSslPolicyRequest.pb(compute.PatchSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchSslPolicyRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'ssl_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_policy='ssl_policy_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchSslPolicyRequest(), + project='project_value', + ssl_policy='ssl_policy_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchSslPolicyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchSslPolicyRequest): + transport_class = transports.SslPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["ssl_policy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["sslPolicy"] = 'ssl_policy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "sslPolicy" in jsonified_request + assert jsonified_request["sslPolicy"] == 'ssl_policy_value' + + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.SslPoliciesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicy", "sslPolicyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SslPoliciesRestInterceptor(), + ) + client = SslPoliciesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, 
"post_patch") as post, \ + mock.patch.object(transports.SslPoliciesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchSslPolicyRequest.pb(compute.PatchSslPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchSslPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchSslPolicyRequest): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'ssl_policy': 'sample2'} + request_init["ssl_policy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'custom_features': ['custom_features_value1', 'custom_features_value2'], 'description': 'description_value', 'enabled_features': ['enabled_features_value1', 'enabled_features_value2'], 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'min_tls_version': 'min_tls_version_value', 'name': 'name_value', 'profile': 'profile_value', 'region': 'region_value', 'self_link': 'self_link_value', 'warnings': [{'code': 'code_value', 'data': [{'key': 'key_value', 'value': 'value_value'}], 'message': 'message_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'ssl_policy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ssl_policy='ssl_policy_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchSslPolicyRequest(), + project='project_value', + ssl_policy='ssl_policy_value', + ssl_policy_resource=compute.SslPolicy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SslPoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SslPoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SslPoliciesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SslPoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SslPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SslPoliciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.SslPoliciesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = SslPoliciesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_ssl_policies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SslPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_ssl_policies_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.compute_v1.services.ssl_policies.transports.SslPoliciesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.SslPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'list_available_features', + 'patch', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_ssl_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.ssl_policies.transports.SslPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SslPoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_ssl_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.ssl_policies.transports.SslPoliciesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SslPoliciesTransport() + adc.assert_called_once() + + +def test_ssl_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SslPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_ssl_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.SslPoliciesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_ssl_policies_host_no_port(transport_name): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_ssl_policies_host_with_port(transport_name): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + 
client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_ssl_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SslPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SslPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_available_features._session + session2 = client2.transport.list_available_features._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SslPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = 
SslPoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SslPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = SslPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SslPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SslPoliciesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SslPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SslPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SslPoliciesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = SslPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SslPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SslPoliciesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SslPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SslPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SslPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SslPoliciesTransport, '_prep_wrapped_messages') as prep: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SslPoliciesTransport, '_prep_wrapped_messages') as prep: + transport_class = SslPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SslPoliciesClient, transports.SslPoliciesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_subnetworks.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_subnetworks.py new file mode 100644 index 000000000..912a38f4b --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_subnetworks.py @@ -0,0 +1,5615 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.subnetworks import SubnetworksClient +from google.cloud.compute_v1.services.subnetworks import pagers +from google.cloud.compute_v1.services.subnetworks import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SubnetworksClient._get_default_mtls_endpoint(None) is None + assert SubnetworksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SubnetworksClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SubnetworksClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SubnetworksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SubnetworksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SubnetworksClient, "rest"), +]) +def test_subnetworks_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SubnetworksRestTransport, "rest"), +]) +def test_subnetworks_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) 
as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SubnetworksClient, "rest"), +]) +def test_subnetworks_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_subnetworks_client_get_transport_class(): + transport = SubnetworksClient.get_transport_class() + available_transports = [ + transports.SubnetworksRestTransport, + ] + assert transport in available_transports + + transport = SubnetworksClient.get_transport_class("rest") + assert transport == transports.SubnetworksRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SubnetworksClient, transports.SubnetworksRestTransport, "rest"), +]) +@mock.patch.object(SubnetworksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubnetworksClient)) +def 
test_subnetworks_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SubnetworksClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SubnetworksClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (SubnetworksClient, transports.SubnetworksRestTransport, "rest", "true"),
    (SubnetworksClient, transports.SubnetworksRestTransport, "rest", "false"),
])
@mock.patch.object(SubnetworksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubnetworksClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_subnetworks_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """Endpoint auto-switching under GOOGLE_API_USE_MTLS_ENDPOINT=auto.

    Three scenarios are exercised, each against both values of
    GOOGLE_API_USE_CLIENT_CERTIFICATE (via the parametrized
    ``use_client_cert_env``):
      1. an explicit client_cert_source in client_options,
      2. a client cert available via ADC only,
      3. no client cert at all (always falls back to the plain endpoint).
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            # Transport __init__ is stubbed out; we only inspect its kwargs.
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                # Cert use disabled: plain endpoint, no cert source forwarded.
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # NOTE(review): `client` here is still the instance from the
                    # previous block — DEFAULT_ENDPOINT/DEFAULT_MTLS_ENDPOINT are
                    # class-level, so this works, but it reads as accidental.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                # No cert anywhere: always the plain endpoint, regardless of env.
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (SubnetworksClient, transports.SubnetworksRestTransport, "rest"),
])
def test_subnetworks_client_client_options_scopes(client_class, transport_class, transport_name):
    """Scopes supplied via client_options are forwarded verbatim to the transport."""
    scope_options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=scope_options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (SubnetworksClient, transports.SubnetworksRestTransport, "rest", None),
])
def test_subnetworks_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """A credentials_file in client_options is handed to the transport untouched."""
    file_options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=file_options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListSubnetworksRequest):
    """Verify required/optional field bookkeeping for aggregated_list over REST.

    Round-trips an empty request through JSON, checks that default-valued
    fields are dropped, that the optional query params don't leak into the
    required set, and finally issues a (fully mocked) call.
    """
    transport_class = transports.SubnetworksRestTransport

    request_init = {}
    request_init["project"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped
    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = 'project_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'

    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.SubnetworkAggregatedList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.SubnetworkAggregatedList.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.aggregated_list(request)

            # No required query params for this method.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_aggregated_list_rest_unset_required_fields():
    """The unset-required set is the optional query params ∩ required params (empty here)."""
    # FIX: instantiate AnonymousCredentials — the original passed the *class*
    # object itself (missing "()"), which only worked by accident because the
    # transport never exercised the credentials in this test.
    transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.aggregated_list._get_unset_required_fields({})
    assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))
def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListSubnetworksRequest):
    """A 400 from the HTTP session surfaces as core_exceptions.BadRequest."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # A request that satisfies transcoding so the call reaches the session.
    request = request_type(**{'project': 'sample1'})

    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.aggregated_list(request)
def test_aggregated_list_rest_flattened_error(transport: str = 'rest'):
    """Passing a request object together with flattened fields must raise ValueError."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    with pytest.raises(ValueError):
        client.aggregated_list(
            compute.AggregatedListSubnetworksRequest(),
            project='project_value',
        )
@pytest.mark.parametrize("request_type", [
    compute.DeleteSubnetworkRequest,
    dict,
])
def test_delete_rest(request_type):
    """delete() over REST returns an extended operation mirroring the wire response."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # A request that satisfies transcoding.
    request = request_type(**{'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'})

    # Every Operation field the mocked server will echo back; the response is
    # asserted field-by-field from this single table.
    expected = dict(
        client_operation_id='client_operation_id_value',
        creation_timestamp='creation_timestamp_value',
        description='description_value',
        end_time='end_time_value',
        http_error_message='http_error_message_value',
        http_error_status_code=2374,
        id=205,
        insert_time='insert_time_value',
        kind='kind_value',
        name='name_value',
        operation_group_id='operation_group_id_value',
        operation_type='operation_type_value',
        progress=885,
        region='region_value',
        self_link='self_link_value',
        start_time='start_time_value',
        status=compute.Operation.Status.DONE,
        status_message='status_message_value',
        target_id=947,
        target_link='target_link_value',
        user='user_value',
        zone='zone_value',
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        return_value = compute.Operation(**expected)

        # Wrap the value into a proper Response obj.
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(compute.Operation.pb(return_value))
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        response = client.delete(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    for field, value in expected.items():
        assert getattr(response, field) == value
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["subnetwork"] = 'subnetwork_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "subnetwork" in jsonified_request + assert jsonified_request["subnetwork"] == 'subnetwork_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
def test_delete_rest_unset_required_fields():
    """delete's unset-required set is requestId ∩ the required path params (empty)."""
    # FIX: instantiate AnonymousCredentials — the original passed the *class*
    # object itself (missing "()"), which only worked by accident because the
    # transport never exercised the credentials in this test.
    transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.delete._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "subnetwork", )))
def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSubnetworkRequest):
    """A 400 HTTP status propagates from delete() as core_exceptions.BadRequest."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # A request that satisfies transcoding so the call reaches the session.
    request = request_type(**{'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'})

    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.delete(request)
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteSubnetworkRequest(), + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + ) + + +def test_delete_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteSubnetworkRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteSubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["subnetwork"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["subnetwork"] = 'subnetwork_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "subnetwork" in jsonified_request + assert jsonified_request["subnetwork"] == 'subnetwork_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "subnetwork", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteSubnetworkRequest.pb(compute.DeleteSubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteSubnetworkRequest(), + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + ) + + +def test_delete_unary_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ExpandIpCidrRangeSubnetworkRequest, + dict, +]) +def test_expand_ip_cidr_range_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetworks_expand_ip_cidr_range_request_resource"] = {'ip_cidr_range': 'ip_cidr_range_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.expand_ip_cidr_range(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_expand_ip_cidr_range_rest_required_fields(request_type=compute.ExpandIpCidrRangeSubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["subnetwork"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).expand_ip_cidr_range._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["subnetwork"] = 'subnetwork_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).expand_ip_cidr_range._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "subnetwork" in jsonified_request + assert jsonified_request["subnetwork"] == 'subnetwork_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.expand_ip_cidr_range(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_expand_ip_cidr_range_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.expand_ip_cidr_range._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "subnetwork", "subnetworksExpandIpCidrRangeRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_expand_ip_cidr_range_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_expand_ip_cidr_range") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_expand_ip_cidr_range") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ExpandIpCidrRangeSubnetworkRequest.pb(compute.ExpandIpCidrRangeSubnetworkRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ExpandIpCidrRangeSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.expand_ip_cidr_range(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_expand_ip_cidr_range_rest_bad_request(transport: str = 'rest', request_type=compute.ExpandIpCidrRangeSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetworks_expand_ip_cidr_range_request_resource"] = {'ip_cidr_range': 'ip_cidr_range_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.expand_ip_cidr_range(request) + + +def test_expand_ip_cidr_range_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetworks_expand_ip_cidr_range_request_resource=compute.SubnetworksExpandIpCidrRangeRequest(ip_cidr_range='ip_cidr_range_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.expand_ip_cidr_range(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange" % client.transport._host, args[1]) + + +def test_expand_ip_cidr_range_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.expand_ip_cidr_range( + compute.ExpandIpCidrRangeSubnetworkRequest(), + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetworks_expand_ip_cidr_range_request_resource=compute.SubnetworksExpandIpCidrRangeRequest(ip_cidr_range='ip_cidr_range_value'), + ) + + +def test_expand_ip_cidr_range_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ExpandIpCidrRangeSubnetworkRequest, + dict, +]) +def test_expand_ip_cidr_range_unary_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetworks_expand_ip_cidr_range_request_resource"] = {'ip_cidr_range': 'ip_cidr_range_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.expand_ip_cidr_range_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_expand_ip_cidr_range_unary_rest_required_fields(request_type=compute.ExpandIpCidrRangeSubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["subnetwork"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).expand_ip_cidr_range._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["subnetwork"] = 'subnetwork_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).expand_ip_cidr_range._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "subnetwork" in jsonified_request + assert jsonified_request["subnetwork"] == 'subnetwork_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.expand_ip_cidr_range_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_expand_ip_cidr_range_unary_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.expand_ip_cidr_range._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "subnetwork", "subnetworksExpandIpCidrRangeRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_expand_ip_cidr_range_unary_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_expand_ip_cidr_range") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_expand_ip_cidr_range") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ExpandIpCidrRangeSubnetworkRequest.pb(compute.ExpandIpCidrRangeSubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.ExpandIpCidrRangeSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.expand_ip_cidr_range_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_expand_ip_cidr_range_unary_rest_bad_request(transport: str = 'rest', request_type=compute.ExpandIpCidrRangeSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetworks_expand_ip_cidr_range_request_resource"] = {'ip_cidr_range': 'ip_cidr_range_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
    # Tail of test_expand_ip_cidr_range_unary_rest_bad_request: fake an HTTP 400
    # from the mocked session and expect core_exceptions.BadRequest to surface.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.expand_ip_cidr_range_unary(request)


# ---------------------------------------------------------------------------
# Generated REST-transport tests for SubnetworksClient (gapic-generator-python
# output). Each RPC gets the same battery: happy path, required-fields
# transcoding, unset-required-fields, interceptors, bad request, flattened
# args, flattened-args error, and a smoke-test constructor. Do not hand-edit.
# ---------------------------------------------------------------------------

def test_expand_ip_cidr_range_unary_rest_flattened():
    """Flattened (keyword-arg) call must hit the expandIpCidrRange URL."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            subnetwork='subnetwork_value',
            subnetworks_expand_ip_cidr_range_request_resource=compute.SubnetworksExpandIpCidrRangeRequest(ip_cidr_range='ip_cidr_range_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.expand_ip_cidr_range_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange" % client.transport._host, args[1])


def test_expand_ip_cidr_range_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields is a ValueError."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.expand_ip_cidr_range_unary(
            compute.ExpandIpCidrRangeSubnetworkRequest(),
            project='project_value',
            region='region_value',
            subnetwork='subnetwork_value',
            subnetworks_expand_ip_cidr_range_request_resource=compute.SubnetworksExpandIpCidrRangeRequest(ip_cidr_range='ip_cidr_range_value'),
        )


def test_expand_ip_cidr_range_unary_rest_error():
    # Smoke test: constructing the client over REST must not raise.
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.GetSubnetworkRequest,
    dict,
])
def test_get_rest(request_type):
    """Happy path for Subnetworks.Get: mocked 200 decodes into a Subnetwork."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Subnetwork(
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            enable_flow_logs=True,
            external_ipv6_prefix='external_ipv6_prefix_value',
            fingerprint='fingerprint_value',
            gateway_address='gateway_address_value',
            id=205,
            internal_ipv6_prefix='internal_ipv6_prefix_value',
            ip_cidr_range='ip_cidr_range_value',
            ipv6_access_type='ipv6_access_type_value',
            ipv6_cidr_range='ipv6_cidr_range_value',
            kind='kind_value',
            name='name_value',
            network='network_value',
            private_ip_google_access=True,
            private_ipv6_google_access='private_ipv6_google_access_value',
            purpose='purpose_value',
            region='region_value',
            role='role_value',
            self_link='self_link_value',
            stack_type='stack_type_value',
            state='state_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Subnetwork.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Subnetwork)
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.enable_flow_logs is True
    assert response.external_ipv6_prefix == 'external_ipv6_prefix_value'
    assert response.fingerprint == 'fingerprint_value'
    assert response.gateway_address == 'gateway_address_value'
    assert response.id == 205
    assert response.internal_ipv6_prefix == 'internal_ipv6_prefix_value'
    assert response.ip_cidr_range == 'ip_cidr_range_value'
    assert response.ipv6_access_type == 'ipv6_access_type_value'
    assert response.ipv6_cidr_range == 'ipv6_cidr_range_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.network == 'network_value'
    assert response.private_ip_google_access is True
    assert response.private_ipv6_google_access == 'private_ipv6_google_access_value'
    assert response.purpose == 'purpose_value'
    assert response.region == 'region_value'
    assert response.role == 'role_value'
    assert response.self_link == 'self_link_value'
    assert response.stack_type == 'stack_type_value'
    assert response.state == 'state_value'


def test_get_rest_required_fields(request_type=compute.GetSubnetworkRequest):
    """Required path params (project/region/subnetwork) survive transcoding."""
    transport_class = transports.SubnetworksRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request_init["subnetwork"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'
    jsonified_request["subnetwork"] = 'subnetwork_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'
    assert "subnetwork" in jsonified_request
    assert jsonified_request["subnetwork"] == 'subnetwork_value'

    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Subnetwork()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Subnetwork.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get(request)

            # Get has no required query params, so none are expected.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_rest_unset_required_fields():
    # NOTE(review): AnonymousCredentials is passed without parentheses (the
    # class, not an instance) throughout these *_unset_required_fields tests;
    # the credentials object is never exercised here, so this appears to be a
    # harmless generator quirk — confirm against generator output.
    transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("project", "region", "subnetwork", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rest_interceptors(null_interceptor):
    """pre_get/post_get interceptor hooks fire exactly once around Get."""
    transport = transports.SubnetworksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(),
    )
    client = SubnetworksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.SubnetworksRestInterceptor, "post_get") as post, \
        mock.patch.object(transports.SubnetworksRestInterceptor, "pre_get") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.GetSubnetworkRequest.pb(compute.GetSubnetworkRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Subnetwork.to_json(compute.Subnetwork())

        request = compute.GetSubnetworkRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Subnetwork()

        client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetSubnetworkRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get(request)


def test_get_rest_flattened():
    """Flattened (keyword-arg) call must hit the subnetwork GET URL."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Subnetwork()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            subnetwork='subnetwork_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Subnetwork.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}" % client.transport._host, args[1])


def test_get_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields is a ValueError."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get(
            compute.GetSubnetworkRequest(),
            project='project_value',
            region='region_value',
            subnetwork='subnetwork_value',
        )


def test_get_rest_error():
    # Smoke test: constructing the client over REST must not raise.
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.GetIamPolicySubnetworkRequest,
    dict,
])
def test_get_iam_policy_rest(request_type):
    """Happy path for Subnetworks.GetIamPolicy: mocked 200 decodes a Policy."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Policy(
            etag='etag_value',
            iam_owned=True,
            version=774,
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Policy.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get_iam_policy(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Policy)
    assert response.etag == 'etag_value'
    assert response.iam_owned is True
    assert response.version == 774


def test_get_iam_policy_rest_required_fields(request_type=compute.GetIamPolicySubnetworkRequest):
    """Required path params (project/region/resource) survive transcoding."""
    transport_class = transports.SubnetworksRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request_init["resource"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'
    jsonified_request["resource"] = 'resource_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("options_requested_policy_version", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'
    assert "resource" in jsonified_request
    assert jsonified_request["resource"] == 'resource_value'

    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Policy()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Policy.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get_iam_policy(request)

            # GetIamPolicy has no required query params, so none are expected.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_iam_policy_rest_unset_required_fields():
    # NOTE(review): AnonymousCredentials intentionally un-called here; see
    # test_get_rest_unset_required_fields.
    transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get_iam_policy._get_unset_required_fields({})
    assert set(unset_fields) == (set(("optionsRequestedPolicyVersion", )) & set(("project", "region", "resource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_iam_policy_rest_interceptors(null_interceptor):
    """pre/post interceptor hooks fire exactly once around GetIamPolicy."""
    transport = transports.SubnetworksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(),
    )
    client = SubnetworksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.SubnetworksRestInterceptor, "post_get_iam_policy") as post, \
        mock.patch.object(transports.SubnetworksRestInterceptor, "pre_get_iam_policy") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.GetIamPolicySubnetworkRequest.pb(compute.GetIamPolicySubnetworkRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Policy.to_json(compute.Policy())

        request = compute.GetIamPolicySubnetworkRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Policy()

        client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.GetIamPolicySubnetworkRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_iam_policy(request)


def test_get_iam_policy_rest_flattened():
    """Flattened (keyword-arg) call must hit the getIamPolicy URL."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Policy()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            resource='resource_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Policy.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get_iam_policy(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/getIamPolicy" % client.transport._host, args[1])


def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields is a ValueError."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_iam_policy(
            compute.GetIamPolicySubnetworkRequest(),
            project='project_value',
            region='region_value',
            resource='resource_value',
        )


def test_get_iam_policy_rest_error():
    # Smoke test: constructing the client over REST must not raise.
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertSubnetworkRequest,
    dict,
])
def test_insert_rest(request_type):
    """Happy path for Subnetworks.Insert: 200 yields an ExtendedOperation."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["subnetwork_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_flow_logs': True, 'external_ipv6_prefix': 'external_ipv6_prefix_value', 'fingerprint': 'fingerprint_value', 'gateway_address': 'gateway_address_value', 'id': 205, 'internal_ipv6_prefix': 'internal_ipv6_prefix_value', 'ip_cidr_range': 'ip_cidr_range_value', 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_cidr_range': 'ipv6_cidr_range_value', 'kind': 'kind_value', 'log_config': {'aggregation_interval': 'aggregation_interval_value', 'enable': True, 'filter_expr': 'filter_expr_value', 'flow_sampling': 0.1394, 'metadata': 'metadata_value', 'metadata_fields': ['metadata_fields_value1', 'metadata_fields_value2']}, 'name': 'name_value', 'network': 'network_value', 'private_ip_google_access': True, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'purpose': 'purpose_value', 'region': 'region_value', 'role': 'role_value', 'secondary_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'range_name': 'range_name_value'}], 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, extended_operation.ExtendedOperation)
    assert response.client_operation_id == 'client_operation_id_value'
    assert response.creation_timestamp == 'creation_timestamp_value'
    assert response.description == 'description_value'
    assert response.end_time == 'end_time_value'
    assert response.http_error_message == 'http_error_message_value'
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == 'insert_time_value'
    assert response.kind == 'kind_value'
    assert response.name == 'name_value'
    assert response.operation_group_id == 'operation_group_id_value'
    assert response.operation_type == 'operation_type_value'
    assert response.progress == 885
    assert response.region == 'region_value'
    assert response.self_link == 'self_link_value'
    assert response.start_time == 'start_time_value'
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == 'status_message_value'
    assert response.target_id == 947
    assert response.target_link == 'target_link_value'
    assert response.user == 'user_value'
    assert response.zone == 'zone_value'


def test_insert_rest_required_fields(request_type=compute.InsertSubnetworkRequest):
    """Required path params (project/region) survive transcoding for Insert."""
    transport_class = transports.SubnetworksRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            # Insert carries a request body, unlike the GET-style methods above.
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.insert(request)

            # Insert has no required query params, so none are expected.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_insert_rest_unset_required_fields():
    # NOTE(review): AnonymousCredentials intentionally un-called here; see
    # test_get_rest_unset_required_fields.
    transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "subnetworkResource", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_insert_rest_interceptors(null_interceptor):
    """pre_insert/post_insert interceptor hooks fire exactly once."""
    transport = transports.SubnetworksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(),
    )
    client = SubnetworksClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.SubnetworksRestInterceptor, "post_insert") as post, \
        mock.patch.object(transports.SubnetworksRestInterceptor, "pre_insert") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.InsertSubnetworkRequest.pb(compute.InsertSubnetworkRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.InsertSubnetworkRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSubnetworkRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["subnetwork_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_flow_logs': True, 'external_ipv6_prefix': 'external_ipv6_prefix_value', 'fingerprint': 'fingerprint_value', 'gateway_address': 'gateway_address_value', 'id': 205, 'internal_ipv6_prefix': 'internal_ipv6_prefix_value', 'ip_cidr_range': 'ip_cidr_range_value', 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_cidr_range': 'ipv6_cidr_range_value', 'kind': 'kind_value', 'log_config': {'aggregation_interval': 'aggregation_interval_value', 'enable': True, 'filter_expr': 'filter_expr_value', 'flow_sampling': 0.1394, 'metadata': 'metadata_value', 'metadata_fields': ['metadata_fields_value1', 'metadata_fields_value2']}, 'name': 'name_value', 'network': 'network_value', 'private_ip_google_access': True, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'purpose': 'purpose_value', 'region': 'region_value', 'role': 'role_value', 'secondary_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'range_name': 'range_name_value'}], 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert(request)


def test_insert_rest_flattened():
    """Flattened (keyword-arg) call must hit the subnetworks POST URL."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            subnetwork_resource=compute.Subnetwork(creation_timestamp='creation_timestamp_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.insert(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks" % client.transport._host, args[1])


def test_insert_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields is a ValueError."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.insert(
            compute.InsertSubnetworkRequest(),
            project='project_value',
            region='region_value',
            subnetwork_resource=compute.Subnetwork(creation_timestamp='creation_timestamp_value'),
        )


def test_insert_rest_error():
    # Smoke test: constructing the client over REST must not raise.
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    compute.InsertSubnetworkRequest,
    dict,
])
def test_insert_unary_rest(request_type):
    """Unary variant of Insert: 200 decodes into a plain compute.Operation."""
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request_init["subnetwork_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_flow_logs': True, 'external_ipv6_prefix': 'external_ipv6_prefix_value', 'fingerprint': 'fingerprint_value', 'gateway_address': 'gateway_address_value', 'id': 205, 'internal_ipv6_prefix': 'internal_ipv6_prefix_value', 'ip_cidr_range': 'ip_cidr_range_value', 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_cidr_range': 'ipv6_cidr_range_value', 'kind': 'kind_value', 'log_config': {'aggregation_interval': 'aggregation_interval_value', 'enable': True, 'filter_expr': 'filter_expr_value', 'flow_sampling': 0.1394, 'metadata': 'metadata_value', 'metadata_fields': ['metadata_fields_value1', 'metadata_fields_value2']}, 'name': 'name_value', 'network': 'network_value', 'private_ip_google_access': True, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'purpose': 'purpose_value', 'region': 'region_value', 'role': 'role_value', 'secondary_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'range_name': 'range_name_value'}], 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.insert_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)


def test_insert_unary_rest_required_fields(request_type=compute.InsertSubnetworkRequest):
    """Required path params (project/region) survive transcoding (unary)."""
    transport_class = transports.SubnetworksRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'

    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "subnetworkResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, 
"post_insert") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertSubnetworkRequest.pb(compute.InsertSubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["subnetwork_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_flow_logs': True, 'external_ipv6_prefix': 'external_ipv6_prefix_value', 'fingerprint': 'fingerprint_value', 'gateway_address': 'gateway_address_value', 'id': 205, 'internal_ipv6_prefix': 'internal_ipv6_prefix_value', 'ip_cidr_range': 'ip_cidr_range_value', 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_cidr_range': 'ipv6_cidr_range_value', 'kind': 'kind_value', 'log_config': {'aggregation_interval': 'aggregation_interval_value', 'enable': True, 'filter_expr': 'filter_expr_value', 'flow_sampling': 0.1394, 'metadata': 'metadata_value', 'metadata_fields': ['metadata_fields_value1', 'metadata_fields_value2']}, 'name': 
'name_value', 'network': 'network_value', 'private_ip_google_access': True, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'purpose': 'purpose_value', 'region': 'region_value', 'role': 'role_value', 'secondary_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'range_name': 'range_name_value'}], 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + subnetwork_resource=compute.Subnetwork(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertSubnetworkRequest(), + project='project_value', + region='region_value', + subnetwork_resource=compute.Subnetwork(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListSubnetworksRequest, + dict, +]) +def test_list_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SubnetworkList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SubnetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListSubnetworksRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.SubnetworkList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.SubnetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListSubnetworksRequest.pb(compute.ListSubnetworksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.SubnetworkList.to_json(compute.SubnetworkList()) + + request = compute.ListSubnetworksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SubnetworkList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListSubnetworksRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.SubnetworkList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.SubnetworkList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListSubnetworksRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SubnetworkList( + items=[ + compute.Subnetwork(), + compute.Subnetwork(), + compute.Subnetwork(), + ], + next_page_token='abc', + ), + compute.SubnetworkList( + items=[], + next_page_token='def', + ), + compute.SubnetworkList( + items=[ + compute.Subnetwork(), + ], + next_page_token='ghi', + ), + compute.SubnetworkList( + items=[ + compute.Subnetwork(), + compute.Subnetwork(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SubnetworkList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Subnetwork) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.ListUsableSubnetworksRequest, + dict, +]) +def test_list_usable_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.UsableSubnetworksAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_usable(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsablePager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_usable_rest_required_fields(request_type=compute.ListUsableSubnetworksRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_usable._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_usable._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.UsableSubnetworksAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_usable(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_usable_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_usable._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_usable_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_list_usable") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_list_usable") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListUsableSubnetworksRequest.pb(compute.ListUsableSubnetworksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.UsableSubnetworksAggregatedList.to_json(compute.UsableSubnetworksAggregatedList()) + + request = compute.ListUsableSubnetworksRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.UsableSubnetworksAggregatedList() + + client.list_usable(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_usable_rest_bad_request(transport: str = 'rest', request_type=compute.ListUsableSubnetworksRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_usable(request) + + +def test_list_usable_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.UsableSubnetworksAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UsableSubnetworksAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_usable(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/subnetworks/listUsable" % client.transport._host, args[1]) + + +def test_list_usable_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_usable( + compute.ListUsableSubnetworksRequest(), + project='project_value', + ) + + +def test_list_usable_rest_pager(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.UsableSubnetworksAggregatedList( + items=[ + compute.UsableSubnetwork(), + compute.UsableSubnetwork(), + compute.UsableSubnetwork(), + ], + next_page_token='abc', + ), + compute.UsableSubnetworksAggregatedList( + items=[], + next_page_token='def', + ), + compute.UsableSubnetworksAggregatedList( + items=[ + compute.UsableSubnetwork(), + ], + next_page_token='ghi', + ), + compute.UsableSubnetworksAggregatedList( + items=[ + compute.UsableSubnetwork(), + compute.UsableSubnetwork(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.UsableSubnetworksAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list_usable(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.UsableSubnetwork) + for i in results) + + pages = list(client.list_usable(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchSubnetworkRequest, + dict, +]) +def test_patch_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetwork_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_flow_logs': True, 
'external_ipv6_prefix': 'external_ipv6_prefix_value', 'fingerprint': 'fingerprint_value', 'gateway_address': 'gateway_address_value', 'id': 205, 'internal_ipv6_prefix': 'internal_ipv6_prefix_value', 'ip_cidr_range': 'ip_cidr_range_value', 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_cidr_range': 'ipv6_cidr_range_value', 'kind': 'kind_value', 'log_config': {'aggregation_interval': 'aggregation_interval_value', 'enable': True, 'filter_expr': 'filter_expr_value', 'flow_sampling': 0.1394, 'metadata': 'metadata_value', 'metadata_fields': ['metadata_fields_value1', 'metadata_fields_value2']}, 'name': 'name_value', 'network': 'network_value', 'private_ip_google_access': True, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'purpose': 'purpose_value', 'region': 'region_value', 'role': 'role_value', 'secondary_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'range_name': 'range_name_value'}], 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchSubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["subnetwork"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["subnetwork"] = 'subnetwork_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("drain_timeout_seconds", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "subnetwork" in jsonified_request + assert jsonified_request["subnetwork"] == 'subnetwork_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("drainTimeoutSeconds", "requestId", )) & set(("project", "region", "subnetwork", "subnetworkResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchSubnetworkRequest.pb(compute.PatchSubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetwork_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_flow_logs': True, 'external_ipv6_prefix': 'external_ipv6_prefix_value', 'fingerprint': 'fingerprint_value', 'gateway_address': 'gateway_address_value', 'id': 205, 'internal_ipv6_prefix': 'internal_ipv6_prefix_value', 'ip_cidr_range': 'ip_cidr_range_value', 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_cidr_range': 'ipv6_cidr_range_value', 'kind': 'kind_value', 'log_config': {'aggregation_interval': 'aggregation_interval_value', 'enable': True, 'filter_expr': 'filter_expr_value', 'flow_sampling': 0.1394, 'metadata': 'metadata_value', 'metadata_fields': ['metadata_fields_value1', 'metadata_fields_value2']}, 'name': 'name_value', 'network': 'network_value', 'private_ip_google_access': True, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'purpose': 'purpose_value', 'region': 'region_value', 'role': 'role_value', 'secondary_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'range_name': 'range_name_value'}], 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 
'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetwork_resource=compute.Subnetwork(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchSubnetworkRequest(), + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetwork_resource=compute.Subnetwork(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchSubnetworkRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetwork_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_flow_logs': True, 'external_ipv6_prefix': 'external_ipv6_prefix_value', 'fingerprint': 'fingerprint_value', 'gateway_address': 'gateway_address_value', 'id': 205, 'internal_ipv6_prefix': 'internal_ipv6_prefix_value', 'ip_cidr_range': 'ip_cidr_range_value', 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_cidr_range': 'ipv6_cidr_range_value', 'kind': 'kind_value', 'log_config': {'aggregation_interval': 'aggregation_interval_value', 'enable': True, 'filter_expr': 'filter_expr_value', 'flow_sampling': 0.1394, 'metadata': 'metadata_value', 'metadata_fields': ['metadata_fields_value1', 
'metadata_fields_value2']}, 'name': 'name_value', 'network': 'network_value', 'private_ip_google_access': True, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'purpose': 'purpose_value', 'region': 'region_value', 'role': 'role_value', 'secondary_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'range_name': 'range_name_value'}], 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchSubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["subnetwork"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["subnetwork"] = 'subnetwork_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("drain_timeout_seconds", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "subnetwork" in jsonified_request + assert jsonified_request["subnetwork"] == 'subnetwork_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("drainTimeoutSeconds", "requestId", )) & set(("project", "region", "subnetwork", "subnetworkResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), 
"request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchSubnetworkRequest.pb(compute.PatchSubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetwork_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'enable_flow_logs': True, 'external_ipv6_prefix': 'external_ipv6_prefix_value', 'fingerprint': 'fingerprint_value', 'gateway_address': 'gateway_address_value', 'id': 205, 'internal_ipv6_prefix': 'internal_ipv6_prefix_value', 'ip_cidr_range': 'ip_cidr_range_value', 'ipv6_access_type': 'ipv6_access_type_value', 'ipv6_cidr_range': 'ipv6_cidr_range_value', 'kind': 'kind_value', 'log_config': {'aggregation_interval': 'aggregation_interval_value', 'enable': True, 
'filter_expr': 'filter_expr_value', 'flow_sampling': 0.1394, 'metadata': 'metadata_value', 'metadata_fields': ['metadata_fields_value1', 'metadata_fields_value2']}, 'name': 'name_value', 'network': 'network_value', 'private_ip_google_access': True, 'private_ipv6_google_access': 'private_ipv6_google_access_value', 'purpose': 'purpose_value', 'region': 'region_value', 'role': 'role_value', 'secondary_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'range_name': 'range_name_value'}], 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'state': 'state_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetwork_resource=compute.Subnetwork(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchSubnetworkRequest(), + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetwork_resource=compute.Subnetwork(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetIamPolicySubnetworkRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': {'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 
'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag='etag_value', + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == 'etag_value' + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_required_fields(request_type=compute.SetIamPolicySubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' 
+ jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "regionSetPolicyRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicySubnetworkRequest.pb(compute.SetIamPolicySubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } 
+ + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicySubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetIamPolicySubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_policy_request_resource"] = {'bindings': [{'binding_id': 'binding_id_value', 'condition': {'description': 'description_value', 'expression': 'expression_value', 'location': 'location_value', 'title': 'title_value'}, 'members': ['members_value1', 'members_value2'], 'role': 'role_value'}], 'etag': 'etag_value', 'policy': {'audit_configs': [{'audit_log_configs': [{'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'ignore_child_exemptions': True, 'log_type': 'log_type_value'}], 'exempted_members': ['exempted_members_value1', 'exempted_members_value2'], 'service': 'service_value'}], 'bindings': {}, 'etag': 'etag_value', 'iam_owned': True, 'rules': [{'action': 'action_value', 'conditions': [{'iam': 'iam_value', 'op': 'op_value', 'svc': 'svc_value', 'sys': 'sys_value', 'values': ['values_value1', 'values_value2']}], 'description': 'description_value', 'ins': ['ins_value1', 'ins_value2'], 'log_configs': [{'cloud_audit': {'authorization_logging_options': {'permission_type': 'permission_type_value'}, 'log_name': 'log_name_value'}, 'counter': 
{'custom_fields': [{'name': 'name_value', 'value': 'value_value'}], 'field': 'field_value', 'metric': 'metric_value'}, 'data_access': {'log_mode': 'log_mode_value'}}], 'not_ins': ['not_ins_value1', 'not_ins_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}], 'version': 774}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicySubnetworkRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_policy_request_resource=compute.RegionSetPolicyRequest(bindings=[compute.Binding(binding_id='binding_id_value')]), + ) + + +def test_set_iam_policy_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetPrivateIpGoogleAccessSubnetworkRequest, + dict, +]) +def test_set_private_ip_google_access_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetworks_set_private_ip_google_access_request_resource"] = {'private_ip_google_access': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_private_ip_google_access(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_private_ip_google_access_rest_required_fields(request_type=compute.SetPrivateIpGoogleAccessSubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["subnetwork"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_private_ip_google_access._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default 
values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["subnetwork"] = 'subnetwork_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_private_ip_google_access._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "subnetwork" in jsonified_request + assert jsonified_request["subnetwork"] == 'subnetwork_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_private_ip_google_access(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_private_ip_google_access_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.set_private_ip_google_access._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "subnetwork", "subnetworksSetPrivateIpGoogleAccessRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_private_ip_google_access_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_set_private_ip_google_access") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_set_private_ip_google_access") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetPrivateIpGoogleAccessSubnetworkRequest.pb(compute.SetPrivateIpGoogleAccessSubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetPrivateIpGoogleAccessSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_private_ip_google_access(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_private_ip_google_access_rest_bad_request(transport: str = 'rest', request_type=compute.SetPrivateIpGoogleAccessSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetworks_set_private_ip_google_access_request_resource"] = {'private_ip_google_access': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_private_ip_google_access(request) + + +def test_set_private_ip_google_access_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetworks_set_private_ip_google_access_request_resource=compute.SubnetworksSetPrivateIpGoogleAccessRequest(private_ip_google_access=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_private_ip_google_access(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess" % client.transport._host, args[1]) + + +def test_set_private_ip_google_access_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_private_ip_google_access( + compute.SetPrivateIpGoogleAccessSubnetworkRequest(), + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetworks_set_private_ip_google_access_request_resource=compute.SubnetworksSetPrivateIpGoogleAccessRequest(private_ip_google_access=True), + ) + + +def test_set_private_ip_google_access_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetPrivateIpGoogleAccessSubnetworkRequest, + dict, +]) +def test_set_private_ip_google_access_unary_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetworks_set_private_ip_google_access_request_resource"] = {'private_ip_google_access': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_private_ip_google_access_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_private_ip_google_access_unary_rest_required_fields(request_type=compute.SetPrivateIpGoogleAccessSubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["subnetwork"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_private_ip_google_access._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["subnetwork"] = 'subnetwork_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_private_ip_google_access._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "subnetwork" in jsonified_request + assert jsonified_request["subnetwork"] == 'subnetwork_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_private_ip_google_access_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_private_ip_google_access_unary_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.set_private_ip_google_access._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "subnetwork", "subnetworksSetPrivateIpGoogleAccessRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_private_ip_google_access_unary_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = 
SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_set_private_ip_google_access") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_set_private_ip_google_access") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetPrivateIpGoogleAccessSubnetworkRequest.pb(compute.SetPrivateIpGoogleAccessSubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetPrivateIpGoogleAccessSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_private_ip_google_access_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_private_ip_google_access_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetPrivateIpGoogleAccessSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + request_init["subnetworks_set_private_ip_google_access_request_resource"] = {'private_ip_google_access': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_private_ip_google_access_unary(request) + + +def test_set_private_ip_google_access_unary_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'subnetwork': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetworks_set_private_ip_google_access_request_resource=compute.SubnetworksSetPrivateIpGoogleAccessRequest(private_ip_google_access=True), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_private_ip_google_access_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess" % client.transport._host, args[1]) + + +def test_set_private_ip_google_access_unary_rest_flattened_error(transport: str = 'rest'): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_private_ip_google_access_unary( + compute.SetPrivateIpGoogleAccessSubnetworkRequest(), + project='project_value', + region='region_value', + subnetwork='subnetwork_value', + subnetworks_set_private_ip_google_access_request_resource=compute.SubnetworksSetPrivateIpGoogleAccessRequest(private_ip_google_access=True), + ) + + +def test_set_private_ip_google_access_unary_rest_error(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsSubnetworkRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsSubnetworkRequest): + transport_class = transports.SubnetworksRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 
'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.SubnetworksRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", 
"testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SubnetworksRestInterceptor(), + ) + client = SubnetworksClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.SubnetworksRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsSubnetworkRequest.pb(compute.TestIamPermissionsSubnetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsSubnetworkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsSubnetworkRequest): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 
'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'):
    """Supplying a request object together with flattened fields must raise.

    The generated client forbids mixing the two calling conventions; the
    expected failure mode is a ValueError before any HTTP traffic occurs.
    """
    client = SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Flattened-field arguments, packaged separately for readability.
    flattened = dict(
        project='project_value',
        region='region_value',
        resource='resource_value',
        test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']),
    )

    with pytest.raises(ValueError):
        client.test_iam_permissions(
            compute.TestIamPermissionsSubnetworkRequest(),
            **flattened,
        )


def test_test_iam_permissions_rest_error():
    """Smoke test: a REST-transport client can be constructed at all."""
    SubnetworksClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
+ transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubnetworksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubnetworksClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubnetworksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubnetworksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SubnetworksClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.SubnetworksRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = SubnetworksClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_subnetworks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SubnetworksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_subnetworks_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.subnetworks.transports.SubnetworksTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.SubnetworksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'expand_ip_cidr_range', + 'get', + 'get_iam_policy', + 'insert', + 'list', + 'list_usable', + 'patch', + 'set_iam_policy', + 'set_private_ip_google_access', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_subnetworks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.subnetworks.transports.SubnetworksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SubnetworksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_subnetworks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.subnetworks.transports.SubnetworksTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SubnetworksTransport() + adc.assert_called_once() + + +def test_subnetworks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SubnetworksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_subnetworks_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.SubnetworksRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_subnetworks_host_no_port(transport_name): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_subnetworks_host_with_port(transport_name): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + 
client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_subnetworks_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SubnetworksClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SubnetworksClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.expand_ip_cidr_range._session + session2 = client2.transport.expand_ip_cidr_range._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_usable._session + session2 = client2.transport.list_usable._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != 
def test_common_billing_account_path():
    """Building a billing-account path yields billingAccounts/{id}."""
    account = "squid"
    assert SubnetworksClient.common_billing_account_path(account) == (
        "billingAccounts/{billing_account}".format(billing_account=account)
    )


def test_parse_common_billing_account_path():
    """A built billing-account path parses back into its components."""
    parts = {"billing_account": "clam"}
    built = SubnetworksClient.common_billing_account_path(**parts)
    assert SubnetworksClient.parse_common_billing_account_path(built) == parts


def test_common_folder_path():
    """Building a folder path yields folders/{id}."""
    folder_id = "whelk"
    assert SubnetworksClient.common_folder_path(folder_id) == (
        "folders/{folder}".format(folder=folder_id)
    )


def test_parse_common_folder_path():
    """A built folder path parses back into its components."""
    parts = {"folder": "octopus"}
    built = SubnetworksClient.common_folder_path(**parts)
    assert SubnetworksClient.parse_common_folder_path(built) == parts


def test_common_organization_path():
    """Building an organization path yields organizations/{id}."""
    org = "oyster"
    assert SubnetworksClient.common_organization_path(org) == (
        "organizations/{organization}".format(organization=org)
    )


def test_parse_common_organization_path():
    """A built organization path parses back into its components."""
    parts = {"organization": "nudibranch"}
    built = SubnetworksClient.common_organization_path(**parts)
    assert SubnetworksClient.parse_common_organization_path(built) == parts


def test_common_project_path():
    """Building a project path yields projects/{id}."""
    proj = "cuttlefish"
    assert SubnetworksClient.common_project_path(proj) == (
        "projects/{project}".format(project=proj)
    )


def test_parse_common_project_path():
    """A built project path parses back into its components."""
    parts = {"project": "mussel"}
    built = SubnetworksClient.common_project_path(**parts)
    assert SubnetworksClient.parse_common_project_path(built) == parts


def test_common_location_path():
    """Building a location path yields projects/{p}/locations/{l}."""
    proj, loc = "winkle", "nautilus"
    assert SubnetworksClient.common_location_path(proj, loc) == (
        "projects/{project}/locations/{location}".format(project=proj, location=loc)
    )
+ actual = SubnetworksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SubnetworksTransport, '_prep_wrapped_messages') as prep: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SubnetworksTransport, '_prep_wrapped_messages') as prep: + transport_class = SubnetworksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SubnetworksClient, transports.SubnetworksRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py new file mode 100644 index 000000000..9c1d9b386 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py @@ -0,0 +1,3033 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
def client_cert_source_callback():
    """Dummy mTLS client-certificate callback.

    Returns a static (certificate, private key) pair of byte strings; the
    tests only compare these values, they are not real credentials.
    """
    cert, key = b"cert bytes", b"key bytes"
    return cert, key
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TargetGrpcProxiesClient._get_default_mtls_endpoint(None) is None + assert TargetGrpcProxiesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TargetGrpcProxiesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TargetGrpcProxiesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TargetGrpcProxiesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TargetGrpcProxiesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetGrpcProxiesClient, "rest"), +]) +def test_target_grpc_proxies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TargetGrpcProxiesRestTransport, "rest"), +]) +def test_target_grpc_proxies_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetGrpcProxiesClient, "rest"), +]) +def test_target_grpc_proxies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_target_grpc_proxies_client_get_transport_class(): + transport = TargetGrpcProxiesClient.get_transport_class() + available_transports = [ + transports.TargetGrpcProxiesRestTransport, + ] + assert transport in available_transports + + transport = TargetGrpcProxiesClient.get_transport_class("rest") + assert transport == transports.TargetGrpcProxiesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetGrpcProxiesClient, transports.TargetGrpcProxiesRestTransport, 
"rest"), +]) +@mock.patch.object(TargetGrpcProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetGrpcProxiesClient)) +def test_target_grpc_proxies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TargetGrpcProxiesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TargetGrpcProxiesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TargetGrpcProxiesClient, transports.TargetGrpcProxiesRestTransport, "rest", "true"), + (TargetGrpcProxiesClient, transports.TargetGrpcProxiesRestTransport, "rest", "false"), +]) +@mock.patch.object(TargetGrpcProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetGrpcProxiesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_target_grpc_proxies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TargetGrpcProxiesClient +]) +@mock.patch.object(TargetGrpcProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetGrpcProxiesClient)) +def test_target_grpc_proxies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetGrpcProxiesClient, transports.TargetGrpcProxiesRestTransport, "rest"), +]) +def test_target_grpc_proxies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TargetGrpcProxiesClient, transports.TargetGrpcProxiesRestTransport, "rest", None), +]) +def test_target_grpc_proxies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetGrpcProxyRequest, + dict, +]) +def test_delete_rest(request_type): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteTargetGrpcProxyRequest): + transport_class = transports.TargetGrpcProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_grpc_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["targetGrpcProxy"] = 'target_grpc_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetGrpcProxy" in jsonified_request + assert jsonified_request["targetGrpcProxy"] == 'target_grpc_proxy_value' + + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.TargetGrpcProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetGrpcProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetGrpcProxiesRestInterceptor(), + ) + client = TargetGrpcProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetGrpcProxyRequest.pb(compute.DeleteTargetGrpcProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetGrpcProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetGrpcProxyRequest): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteTargetGrpcProxyRequest(), + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + ) + + +def test_delete_rest_error(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetGrpcProxyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteTargetGrpcProxyRequest): + transport_class = transports.TargetGrpcProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_grpc_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetGrpcProxy"] = 'target_grpc_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetGrpcProxy" in jsonified_request + assert jsonified_request["targetGrpcProxy"] == 'target_grpc_proxy_value' + + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.TargetGrpcProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetGrpcProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetGrpcProxiesRestInterceptor(), + ) + client = TargetGrpcProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") 
as transcode, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetGrpcProxyRequest.pb(compute.DeleteTargetGrpcProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetGrpcProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetGrpcProxyRequest): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteTargetGrpcProxyRequest(), + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + ) + + +def test_delete_unary_rest_error(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetTargetGrpcProxyRequest, + dict, +]) +def test_get_rest(request_type): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetGrpcProxy( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + self_link='self_link_value', + self_link_with_id='self_link_with_id_value', + url_map='url_map_value', + validate_for_proxyless=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetGrpcProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetGrpcProxy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.self_link == 'self_link_value' + assert response.self_link_with_id == 'self_link_with_id_value' + assert response.url_map == 'url_map_value' + assert response.validate_for_proxyless is True + + +def test_get_rest_required_fields(request_type=compute.GetTargetGrpcProxyRequest): + transport_class = transports.TargetGrpcProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_grpc_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetGrpcProxy"] = 'target_grpc_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetGrpcProxy" in jsonified_request + assert jsonified_request["targetGrpcProxy"] == 'target_grpc_proxy_value' + + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetGrpcProxy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetGrpcProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.TargetGrpcProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "targetGrpcProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetGrpcProxiesRestInterceptor(), + ) + client = TargetGrpcProxiesClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetTargetGrpcProxyRequest.pb(compute.GetTargetGrpcProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetGrpcProxy.to_json(compute.TargetGrpcProxy()) + + request = compute.GetTargetGrpcProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetGrpcProxy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetTargetGrpcProxyRequest): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetGrpcProxy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetGrpcProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetTargetGrpcProxyRequest(), + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + ) + + +def test_get_rest_error(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetGrpcProxyRequest, + dict, +]) +def test_insert_rest(request_type): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_grpc_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'url_map': 'url_map_value', 'validate_for_proxyless': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertTargetGrpcProxyRequest): + transport_class = transports.TargetGrpcProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.TargetGrpcProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetGrpcProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetGrpcProxiesRestInterceptor(), + ) + client = TargetGrpcProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetGrpcProxyRequest.pb(compute.InsertTargetGrpcProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetGrpcProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetGrpcProxyRequest): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_grpc_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'url_map': 'url_map_value', 'validate_for_proxyless': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_grpc_proxy_resource=compute.TargetGrpcProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetGrpcProxies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertTargetGrpcProxyRequest(), + project='project_value', + target_grpc_proxy_resource=compute.TargetGrpcProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetGrpcProxyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_grpc_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'url_map': 'url_map_value', 'validate_for_proxyless': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertTargetGrpcProxyRequest): + transport_class = transports.TargetGrpcProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.TargetGrpcProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetGrpcProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetGrpcProxiesRestInterceptor(), + ) + client = TargetGrpcProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertTargetGrpcProxyRequest.pb(compute.InsertTargetGrpcProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetGrpcProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetGrpcProxyRequest): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_grpc_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'url_map': 'url_map_value', 'validate_for_proxyless': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_grpc_proxy_resource=compute.TargetGrpcProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetGrpcProxies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertTargetGrpcProxyRequest(), + project='project_value', + target_grpc_proxy_resource=compute.TargetGrpcProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListTargetGrpcProxiesRequest, + dict, +]) +def test_list_rest(request_type): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetGrpcProxyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetGrpcProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListTargetGrpcProxiesRequest): + transport_class = transports.TargetGrpcProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetGrpcProxyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetGrpcProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.TargetGrpcProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetGrpcProxiesRestInterceptor(), + ) + client = TargetGrpcProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListTargetGrpcProxiesRequest.pb(compute.ListTargetGrpcProxiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetGrpcProxyList.to_json(compute.TargetGrpcProxyList()) + + request = compute.ListTargetGrpcProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetGrpcProxyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListTargetGrpcProxiesRequest): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetGrpcProxyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetGrpcProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetGrpcProxies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListTargetGrpcProxiesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetGrpcProxyList( + items=[ + compute.TargetGrpcProxy(), + compute.TargetGrpcProxy(), + compute.TargetGrpcProxy(), + ], + next_page_token='abc', + ), + compute.TargetGrpcProxyList( + items=[], + next_page_token='def', + ), + compute.TargetGrpcProxyList( + items=[ + compute.TargetGrpcProxy(), + ], + next_page_token='ghi', + ), + compute.TargetGrpcProxyList( + items=[ + compute.TargetGrpcProxy(), + compute.TargetGrpcProxy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetGrpcProxyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetGrpcProxy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchTargetGrpcProxyRequest, + dict, +]) +def test_patch_rest(request_type): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request_init["target_grpc_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 
'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'url_map': 'url_map_value', 'validate_for_proxyless': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchTargetGrpcProxyRequest): + transport_class = transports.TargetGrpcProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_grpc_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["targetGrpcProxy"] = 'target_grpc_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetGrpcProxy" in jsonified_request + assert jsonified_request["targetGrpcProxy"] == 'target_grpc_proxy_value' + + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.TargetGrpcProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetGrpcProxy", "targetGrpcProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetGrpcProxiesRestInterceptor(), + ) + client = TargetGrpcProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchTargetGrpcProxyRequest.pb(compute.PatchTargetGrpcProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchTargetGrpcProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchTargetGrpcProxyRequest): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request_init["target_grpc_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'url_map': 'url_map_value', 'validate_for_proxyless': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + target_grpc_proxy_resource=compute.TargetGrpcProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchTargetGrpcProxyRequest(), + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + target_grpc_proxy_resource=compute.TargetGrpcProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchTargetGrpcProxyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request_init["target_grpc_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'url_map': 'url_map_value', 'validate_for_proxyless': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchTargetGrpcProxyRequest): + transport_class = transports.TargetGrpcProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_grpc_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetGrpcProxy"] = 'target_grpc_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetGrpcProxy" in jsonified_request + assert jsonified_request["targetGrpcProxy"] == 'target_grpc_proxy_value' + + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.TargetGrpcProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetGrpcProxy", "targetGrpcProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetGrpcProxiesRestInterceptor(), + ) + client = TargetGrpcProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.TargetGrpcProxiesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchTargetGrpcProxyRequest.pb(compute.PatchTargetGrpcProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchTargetGrpcProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchTargetGrpcProxyRequest): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + request_init["target_grpc_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'self_link': 'self_link_value', 'self_link_with_id': 'self_link_with_id_value', 'url_map': 'url_map_value', 'validate_for_proxyless': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_grpc_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + target_grpc_proxy_resource=compute.TargetGrpcProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchTargetGrpcProxyRequest(), + project='project_value', + target_grpc_proxy='target_grpc_proxy_value', + target_grpc_proxy_resource=compute.TargetGrpcProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetGrpcProxiesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetGrpcProxiesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetGrpcProxiesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetGrpcProxiesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TargetGrpcProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TargetGrpcProxiesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.TargetGrpcProxiesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = TargetGrpcProxiesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_target_grpc_proxies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TargetGrpcProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_target_grpc_proxies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.target_grpc_proxies.transports.TargetGrpcProxiesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TargetGrpcProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'patch', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_target_grpc_proxies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.target_grpc_proxies.transports.TargetGrpcProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetGrpcProxiesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_target_grpc_proxies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.target_grpc_proxies.transports.TargetGrpcProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetGrpcProxiesTransport() + adc.assert_called_once() + + +def test_target_grpc_proxies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TargetGrpcProxiesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_target_grpc_proxies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.TargetGrpcProxiesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_grpc_proxies_host_no_port(transport_name): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_grpc_proxies_host_with_port(transport_name): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_grpc_proxies_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TargetGrpcProxiesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TargetGrpcProxiesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TargetGrpcProxiesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TargetGrpcProxiesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TargetGrpcProxiesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TargetGrpcProxiesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TargetGrpcProxiesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetGrpcProxiesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TargetGrpcProxiesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TargetGrpcProxiesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TargetGrpcProxiesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TargetGrpcProxiesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TargetGrpcProxiesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TargetGrpcProxiesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TargetGrpcProxiesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TargetGrpcProxiesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetGrpcProxiesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TargetGrpcProxiesTransport, '_prep_wrapped_messages') as prep: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TargetGrpcProxiesTransport, '_prep_wrapped_messages') as prep: + transport_class = TargetGrpcProxiesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TargetGrpcProxiesClient, transports.TargetGrpcProxiesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_http_proxies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_http_proxies.py new file mode 100644 index 000000000..b9b1f4b0d --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_http_proxies.py @@ -0,0 +1,3902 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.target_http_proxies import TargetHttpProxiesClient +from google.cloud.compute_v1.services.target_http_proxies import pagers +from google.cloud.compute_v1.services.target_http_proxies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client):
+    # Substitute a non-localhost endpoint so a distinct mTLS endpoint can be
+    # derived in tests; otherwise return the client's DEFAULT_ENDPOINT unchanged.
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    # Per the asserts below: *.googleapis.com maps to *.mtls.googleapis.com,
+    # already-mtls and non-googleapi hosts are returned unchanged, None passes through.
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert TargetHttpProxiesClient._get_default_mtls_endpoint(None) is None
+    assert TargetHttpProxiesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert TargetHttpProxiesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert TargetHttpProxiesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert TargetHttpProxiesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert TargetHttpProxiesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (TargetHttpProxiesClient, "rest"),
+])
+def test_target_http_proxies_client_from_service_account_info(client_class, transport_name):
+    # from_service_account_info must produce a client whose transport holds the
+    # factory-returned credentials and targets the REST host URL.
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        # grpc transports expose host:port; REST exposes a full URL.
+        assert client.transport._host == (
+            'compute.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://compute.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.TargetHttpProxiesRestTransport, "rest"),
+])
+def test_target_http_proxies_client_service_account_always_use_jwt(transport_class, transport_name):
+    with
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetHttpProxiesClient, "rest"), +]) +def test_target_http_proxies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_target_http_proxies_client_get_transport_class(): + transport = TargetHttpProxiesClient.get_transport_class() + available_transports = [ + transports.TargetHttpProxiesRestTransport, + ] + assert transport in available_transports + + transport = TargetHttpProxiesClient.get_transport_class("rest") + assert transport == transports.TargetHttpProxiesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport, 
"rest"), +]) +@mock.patch.object(TargetHttpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetHttpProxiesClient)) +def test_target_http_proxies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TargetHttpProxiesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TargetHttpProxiesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport, "rest", "true"), + (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport, "rest", "false"), +]) +@mock.patch.object(TargetHttpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetHttpProxiesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_target_http_proxies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TargetHttpProxiesClient +]) +@mock.patch.object(TargetHttpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetHttpProxiesClient)) +def test_target_http_proxies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport, "rest"), +]) +def test_target_http_proxies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport, "rest", None), +]) +def test_target_http_proxies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListTargetHttpProxiesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpProxyAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListTargetHttpProxiesRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpProxyAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.aggregated_list(request)
+
+            # With transcode mocked, no query params are expected on the request.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_aggregated_list_rest_unset_required_fields():
+    # NOTE(review): `ga_credentials.AnonymousCredentials` is passed as the class
+    # object, not an instance (missing parentheses); the transport appears to
+    # tolerate it in this generated test — confirm against the generator output.
+    transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    # Expected value mirrors the method's optional query params intersected
+    # with its required field set, exactly as asserted below.
+    unset_fields = transport.aggregated_list._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_aggregated_list_rest_interceptors(null_interceptor):
+    # Exercise the REST interceptor hooks both with and without an interceptor attached.
+    transport = transports.TargetHttpProxiesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(),
+    )
+    client = TargetHttpProxiesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_aggregated_list") as post, \
+         mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_aggregated_list") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.AggregatedListTargetHttpProxiesRequest.pb(compute.AggregatedListTargetHttpProxiesRequest())
+        transcode.return_value = {
+            "method":
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpProxyAggregatedList.to_json(compute.TargetHttpProxyAggregatedList()) + + request = compute.AggregatedListTargetHttpProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpProxyAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListTargetHttpProxiesRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpProxyAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/targetHttpProxies" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListTargetHttpProxiesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetHttpProxyAggregatedList( + items={ + 'a':compute.TargetHttpProxiesScopedList(), + 'b':compute.TargetHttpProxiesScopedList(), + 'c':compute.TargetHttpProxiesScopedList(), + }, + next_page_token='abc', + ), + compute.TargetHttpProxyAggregatedList( + items={}, + next_page_token='def', + ), + compute.TargetHttpProxyAggregatedList( + items={ + 'g':compute.TargetHttpProxiesScopedList(), + }, + next_page_token='ghi', + ), + compute.TargetHttpProxyAggregatedList( + items={ + 'h':compute.TargetHttpProxiesScopedList(), + 'i':compute.TargetHttpProxiesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetHttpProxyAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.TargetHttpProxiesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.TargetHttpProxiesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.TargetHttpProxiesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetHttpProxyRequest, + dict, 
+]) +def test_delete_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetHttpProxyRequest.pb(compute.DeleteTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_http_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy='target_http_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteTargetHttpProxyRequest(), + project='project_value', + target_http_proxy='target_http_proxy_value', + ) + + +def test_delete_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetHttpProxyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") 
as transcode, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetHttpProxyRequest.pb(compute.DeleteTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_http_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy='target_http_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteTargetHttpProxyRequest(), + project='project_value', + target_http_proxy='target_http_proxy_value', + ) + + +def test_delete_unary_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetTargetHttpProxyRequest, + dict, +]) +def test_get_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpProxy( + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + http_keep_alive_timeout_sec=2868, + id=205, + kind='kind_value', + name='name_value', + proxy_bind=True, + region='region_value', + self_link='self_link_value', + url_map='url_map_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetHttpProxy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.http_keep_alive_timeout_sec == 2868 + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.proxy_bind is True + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.url_map == 'url_map_value' + + +def test_get_rest_required_fields(request_type=compute.GetTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = TargetHttpProxiesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpProxy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "targetHttpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = 
TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetTargetHttpProxyRequest.pb(compute.GetTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpProxy.to_json(compute.TargetHttpProxy()) + + request = compute.GetTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpProxy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpProxy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_http_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy='target_http_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetTargetHttpProxyRequest(), + project='project_value', + target_http_proxy='target_http_proxy_value', + ) + + +def test_get_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetHttpProxyRequest, + dict, +]) +def test_insert_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetHttpProxyRequest.pb(compute.InsertTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpProxies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertTargetHttpProxyRequest(), + project='project_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetHttpProxyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertTargetHttpProxyRequest.pb(compute.InsertTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpProxies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertTargetHttpProxyRequest(), + project='project_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListTargetHttpProxiesRequest, + dict, +]) +def test_list_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpProxyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListTargetHttpProxiesRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpProxyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListTargetHttpProxiesRequest.pb(compute.ListTargetHttpProxiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpProxyList.to_json(compute.TargetHttpProxyList()) + + request = compute.ListTargetHttpProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpProxyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListTargetHttpProxiesRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpProxyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpProxies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListTargetHttpProxiesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetHttpProxyList( + items=[ + compute.TargetHttpProxy(), + compute.TargetHttpProxy(), + compute.TargetHttpProxy(), + ], + next_page_token='abc', + ), + compute.TargetHttpProxyList( + items=[], + next_page_token='def', + ), + compute.TargetHttpProxyList( + items=[ + compute.TargetHttpProxy(), + ], + next_page_token='ghi', + ), + compute.TargetHttpProxyList( + items=[ + compute.TargetHttpProxy(), + compute.TargetHttpProxy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetHttpProxyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetHttpProxy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchTargetHttpProxyRequest, + dict, +]) +def test_patch_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 
'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpProxy", "targetHttpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchTargetHttpProxyRequest.pb(compute.PatchTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_http_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy='target_http_proxy_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchTargetHttpProxyRequest(), + project='project_value', + target_http_proxy='target_http_proxy_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchTargetHttpProxyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpProxy", "targetHttpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchTargetHttpProxyRequest.pb(compute.PatchTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request_init["target_http_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'region': 'region_value', 'self_link': 'self_link_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_http_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy='target_http_proxy_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchTargetHttpProxyRequest(), + project='project_value', + target_http_proxy='target_http_proxy_value', + target_http_proxy_resource=compute.TargetHttpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetUrlMapTargetHttpProxyRequest, + dict, +]) +def test_set_url_map_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_url_map(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_url_map_rest_required_fields(request_type=compute.SetUrlMapTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_url_map(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_url_map_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_url_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpProxy", "urlMapReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_url_map_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_set_url_map") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_set_url_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUrlMapTargetHttpProxyRequest.pb(compute.SetUrlMapTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUrlMapTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_url_map(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_url_map_rest_bad_request(transport: str = 'rest', request_type=compute.SetUrlMapTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map(request) + + +def test_set_url_map_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_http_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy='target_http_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_url_map(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/targetHttpProxies/{target_http_proxy}/setUrlMap" % client.transport._host, args[1]) + + +def test_set_url_map_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_url_map( + compute.SetUrlMapTargetHttpProxyRequest(), + project='project_value', + target_http_proxy='target_http_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + + +def test_set_url_map_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetUrlMapTargetHttpProxyRequest, + dict, +]) +def test_set_url_map_unary_rest(request_type): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_url_map_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_url_map_unary_rest_required_fields(request_type=compute.SetUrlMapTargetHttpProxyRequest): + transport_class = transports.TargetHttpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_http_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpProxy"] = 'target_http_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpProxy" in jsonified_request + assert jsonified_request["targetHttpProxy"] == 'target_http_proxy_value' + + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_url_map_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_url_map_unary_rest_unset_required_fields(): + transport = transports.TargetHttpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_url_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpProxy", "urlMapReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_url_map_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpProxiesRestInterceptor(), + ) + client = TargetHttpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "post_set_url_map") as post, \ + mock.patch.object(transports.TargetHttpProxiesRestInterceptor, "pre_set_url_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUrlMapTargetHttpProxyRequest.pb(compute.SetUrlMapTargetHttpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUrlMapTargetHttpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_url_map_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_url_map_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetUrlMapTargetHttpProxyRequest): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_http_proxy': 'sample2'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map_unary(request) + + +def test_set_url_map_unary_rest_flattened(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_http_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_http_proxy='target_http_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_url_map_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/targetHttpProxies/{target_http_proxy}/setUrlMap" % client.transport._host, args[1]) + + +def test_set_url_map_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_url_map_unary( + compute.SetUrlMapTargetHttpProxyRequest(), + project='project_value', + target_http_proxy='target_http_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + + +def test_set_url_map_unary_rest_error(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetHttpProxiesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetHttpProxiesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetHttpProxiesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetHttpProxiesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TargetHttpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TargetHttpProxiesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.TargetHttpProxiesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = TargetHttpProxiesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_target_http_proxies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TargetHttpProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_target_http_proxies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.target_http_proxies.transports.TargetHttpProxiesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TargetHttpProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'set_url_map', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_target_http_proxies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.target_http_proxies.transports.TargetHttpProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetHttpProxiesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_target_http_proxies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.target_http_proxies.transports.TargetHttpProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetHttpProxiesTransport() + adc.assert_called_once() + + +def test_target_http_proxies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TargetHttpProxiesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_target_http_proxies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.TargetHttpProxiesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_http_proxies_host_no_port(transport_name): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_http_proxies_host_with_port(transport_name): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_http_proxies_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TargetHttpProxiesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TargetHttpProxiesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_url_map._session + session2 = client2.transport.set_url_map._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TargetHttpProxiesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TargetHttpProxiesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetHttpProxiesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TargetHttpProxiesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TargetHttpProxiesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TargetHttpProxiesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TargetHttpProxiesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TargetHttpProxiesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TargetHttpProxiesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TargetHttpProxiesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TargetHttpProxiesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetHttpProxiesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TargetHttpProxiesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TargetHttpProxiesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TargetHttpProxiesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TargetHttpProxiesTransport, '_prep_wrapped_messages') as prep: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TargetHttpProxiesTransport, '_prep_wrapped_messages') as prep: + transport_class = TargetHttpProxiesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TargetHttpProxiesClient, transports.TargetHttpProxiesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_https_proxies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_https_proxies.py new file mode 100644 index 000000000..b74f94115 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_https_proxies.py @@ -0,0 +1,6138 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.target_https_proxies import TargetHttpsProxiesClient +from google.cloud.compute_v1.services.target_https_proxies import pagers +from google.cloud.compute_v1.services.target_https_proxies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TargetHttpsProxiesClient._get_default_mtls_endpoint(None) is None + assert TargetHttpsProxiesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TargetHttpsProxiesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TargetHttpsProxiesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TargetHttpsProxiesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TargetHttpsProxiesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetHttpsProxiesClient, "rest"), +]) +def test_target_https_proxies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TargetHttpsProxiesRestTransport, "rest"), +]) +def test_target_https_proxies_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetHttpsProxiesClient, "rest"), +]) +def test_target_https_proxies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_target_https_proxies_client_get_transport_class(): + transport = TargetHttpsProxiesClient.get_transport_class() + available_transports = [ + transports.TargetHttpsProxiesRestTransport, + ] + assert transport in available_transports + + transport = TargetHttpsProxiesClient.get_transport_class("rest") + assert transport == transports.TargetHttpsProxiesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetHttpsProxiesClient, 
transports.TargetHttpsProxiesRestTransport, "rest"), +]) +@mock.patch.object(TargetHttpsProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetHttpsProxiesClient)) +def test_target_https_proxies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TargetHttpsProxiesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TargetHttpsProxiesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TargetHttpsProxiesClient, transports.TargetHttpsProxiesRestTransport, "rest", "true"), + (TargetHttpsProxiesClient, transports.TargetHttpsProxiesRestTransport, "rest", "false"), +]) +@mock.patch.object(TargetHttpsProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetHttpsProxiesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_target_https_proxies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the 
endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TargetHttpsProxiesClient +]) +@mock.patch.object(TargetHttpsProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetHttpsProxiesClient)) +def test_target_https_proxies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetHttpsProxiesClient, transports.TargetHttpsProxiesRestTransport, "rest"), +]) +def test_target_https_proxies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TargetHttpsProxiesClient, transports.TargetHttpsProxiesRestTransport, "rest", None), +]) +def test_target_https_proxies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListTargetHttpsProxiesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpsProxyAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListTargetHttpsProxiesRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpsProxyAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListTargetHttpsProxiesRequest.pb(compute.AggregatedListTargetHttpsProxiesRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpsProxyAggregatedList.to_json(compute.TargetHttpsProxyAggregatedList()) + + request = compute.AggregatedListTargetHttpsProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpsProxyAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListTargetHttpsProxiesRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxyAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/targetHttpsProxies" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListTargetHttpsProxiesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetHttpsProxyAggregatedList( + items={ + 'a':compute.TargetHttpsProxiesScopedList(), + 'b':compute.TargetHttpsProxiesScopedList(), + 'c':compute.TargetHttpsProxiesScopedList(), + }, + next_page_token='abc', + ), + compute.TargetHttpsProxyAggregatedList( + items={}, + next_page_token='def', + ), + compute.TargetHttpsProxyAggregatedList( + items={ + 'g':compute.TargetHttpsProxiesScopedList(), + }, + next_page_token='ghi', + ), + compute.TargetHttpsProxyAggregatedList( + items={ + 'h':compute.TargetHttpsProxiesScopedList(), + 'i':compute.TargetHttpsProxiesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetHttpsProxyAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.TargetHttpsProxiesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.TargetHttpsProxiesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.TargetHttpsProxiesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + 
compute.DeleteTargetHttpsProxyRequest, + dict, +]) +def test_delete_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetHttpsProxyRequest.pb(compute.DeleteTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + ) + + +def test_delete_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetHttpsProxyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetHttpsProxyRequest.pb(compute.DeleteTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + ) + + +def test_delete_unary_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetTargetHttpsProxyRequest, + dict, +]) +def test_get_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxy( + authorization_policy='authorization_policy_value', + certificate_map='certificate_map_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + fingerprint='fingerprint_value', + http_keep_alive_timeout_sec=2868, + id=205, + kind='kind_value', + name='name_value', + proxy_bind=True, + quic_override='quic_override_value', + region='region_value', + self_link='self_link_value', + server_tls_policy='server_tls_policy_value', + ssl_certificates=['ssl_certificates_value'], + ssl_policy='ssl_policy_value', + url_map='url_map_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetHttpsProxy) + assert response.authorization_policy == 'authorization_policy_value' + assert response.certificate_map == 'certificate_map_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.http_keep_alive_timeout_sec == 2868 + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.proxy_bind is True + assert response.quic_override == 'quic_override_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.server_tls_policy == 'server_tls_policy_value' + assert response.ssl_certificates == ['ssl_certificates_value'] + assert response.ssl_policy == 'ssl_policy_value' + assert response.url_map == 'url_map_value' + + +def test_get_rest_required_fields(request_type=compute.GetTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpsProxy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetTargetHttpsProxyRequest.pb(compute.GetTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpsProxy.to_json(compute.TargetHttpsProxy()) + + request = compute.GetTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpsProxy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + ) + + +def test_get_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetHttpsProxyRequest, + dict, +]) +def test_insert_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetHttpsProxyRequest.pb(compute.InsertTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + + +def test_insert_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetHttpsProxyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertTargetHttpsProxyRequest.pb(compute.InsertTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + + +def test_insert_unary_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListTargetHttpsProxiesRequest, + dict, +]) +def test_list_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListTargetHttpsProxiesRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetHttpsProxyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListTargetHttpsProxiesRequest.pb(compute.ListTargetHttpsProxiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetHttpsProxyList.to_json(compute.TargetHttpsProxyList()) + + request = compute.ListTargetHttpsProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetHttpsProxyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListTargetHttpsProxiesRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetHttpsProxyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetHttpsProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListTargetHttpsProxiesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetHttpsProxyList( + items=[ + compute.TargetHttpsProxy(), + compute.TargetHttpsProxy(), + compute.TargetHttpsProxy(), + ], + next_page_token='abc', + ), + compute.TargetHttpsProxyList( + items=[], + next_page_token='def', + ), + compute.TargetHttpsProxyList( + items=[ + compute.TargetHttpsProxy(), + ], + next_page_token='ghi', + ), + compute.TargetHttpsProxyList( + items=[ + compute.TargetHttpsProxy(), + compute.TargetHttpsProxy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetHttpsProxyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetHttpsProxy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.PatchTargetHttpsProxyRequest, + dict, +]) +def test_patch_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 
'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_patch_rest_required_fields(request_type=compute.PatchTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxy", "targetHttpsProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchTargetHttpsProxyRequest.pb(compute.PatchTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_patch_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch( + compute.PatchTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + + +def test_patch_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.PatchTargetHttpsProxyRequest, + dict, +]) +def test_patch_unary_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 'url_map_value'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_required_fields(request_type=compute.PatchTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxy", "targetHttpsProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_patch") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchTargetHttpsProxyRequest.pb(compute.PatchTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxy_resource"] = {'authorization_policy': 'authorization_policy_value', 'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'http_keep_alive_timeout_sec': 2868, 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'quic_override': 'quic_override_value', 'region': 'region_value', 'self_link': 'self_link_value', 'server_tls_policy': 'server_tls_policy_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value', 'url_map': 
'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxy_resource=compute.TargetHttpsProxy(authorization_policy='authorization_policy_value'), + ) + + +def test_patch_unary_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetCertificateMapTargetHttpsProxyRequest, + dict, +]) +def test_set_certificate_map_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_certificate_map_request_resource"] = {'certificate_map': 'certificate_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_certificate_map(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_certificate_map_rest_required_fields(request_type=compute.SetCertificateMapTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_certificate_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_certificate_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_certificate_map(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_certificate_map_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_certificate_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxiesSetCertificateMapRequestResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_certificate_map_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_certificate_map") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_certificate_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetCertificateMapTargetHttpsProxyRequest.pb(compute.SetCertificateMapTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetCertificateMapTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_certificate_map(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_certificate_map_rest_bad_request(transport: str = 'rest', request_type=compute.SetCertificateMapTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_certificate_map_request_resource"] = {'certificate_map': 'certificate_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_certificate_map(request) + + +def test_set_certificate_map_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_certificate_map_request_resource=compute.TargetHttpsProxiesSetCertificateMapRequest(certificate_map='certificate_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_certificate_map(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setCertificateMap" % client.transport._host, args[1]) + + +def test_set_certificate_map_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_certificate_map( + compute.SetCertificateMapTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_certificate_map_request_resource=compute.TargetHttpsProxiesSetCertificateMapRequest(certificate_map='certificate_map_value'), + ) + + +def test_set_certificate_map_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetCertificateMapTargetHttpsProxyRequest, + dict, +]) +def test_set_certificate_map_unary_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_certificate_map_request_resource"] = {'certificate_map': 'certificate_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_certificate_map_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_certificate_map_unary_rest_required_fields(request_type=compute.SetCertificateMapTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_certificate_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_certificate_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_certificate_map_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_certificate_map_unary_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_certificate_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxiesSetCertificateMapRequestResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_certificate_map_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_certificate_map") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_certificate_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetCertificateMapTargetHttpsProxyRequest.pb(compute.SetCertificateMapTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetCertificateMapTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_certificate_map_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_certificate_map_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetCertificateMapTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_certificate_map_request_resource"] = {'certificate_map': 'certificate_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_certificate_map_unary(request) + + +def test_set_certificate_map_unary_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_certificate_map_request_resource=compute.TargetHttpsProxiesSetCertificateMapRequest(certificate_map='certificate_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_certificate_map_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setCertificateMap" % client.transport._host, args[1]) + + +def test_set_certificate_map_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_certificate_map_unary( + compute.SetCertificateMapTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_certificate_map_request_resource=compute.TargetHttpsProxiesSetCertificateMapRequest(certificate_map='certificate_map_value'), + ) + + +def test_set_certificate_map_unary_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetQuicOverrideTargetHttpsProxyRequest, + dict, +]) +def test_set_quic_override_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_quic_override_request_resource"] = {'quic_override': 'quic_override_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_quic_override(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_quic_override_rest_required_fields(request_type=compute.SetQuicOverrideTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_quic_override._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_quic_override._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_quic_override(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_quic_override_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_quic_override._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxiesSetQuicOverrideRequestResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_quic_override_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_quic_override") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_quic_override") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetQuicOverrideTargetHttpsProxyRequest.pb(compute.SetQuicOverrideTargetHttpsProxyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetQuicOverrideTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_quic_override(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_quic_override_rest_bad_request(transport: str = 'rest', request_type=compute.SetQuicOverrideTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_quic_override_request_resource"] = {'quic_override': 'quic_override_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_quic_override(request) + + +def test_set_quic_override_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_quic_override_request_resource=compute.TargetHttpsProxiesSetQuicOverrideRequest(quic_override='quic_override_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_quic_override(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setQuicOverride" % client.transport._host, args[1]) + + +def test_set_quic_override_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_quic_override( + compute.SetQuicOverrideTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_quic_override_request_resource=compute.TargetHttpsProxiesSetQuicOverrideRequest(quic_override='quic_override_value'), + ) + + +def test_set_quic_override_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetQuicOverrideTargetHttpsProxyRequest, + dict, +]) +def test_set_quic_override_unary_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_quic_override_request_resource"] = {'quic_override': 'quic_override_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_quic_override_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_quic_override_unary_rest_required_fields(request_type=compute.SetQuicOverrideTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_quic_override._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_quic_override._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_quic_override_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_quic_override_unary_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_quic_override._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxiesSetQuicOverrideRequestResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_quic_override_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_quic_override") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_quic_override") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetQuicOverrideTargetHttpsProxyRequest.pb(compute.SetQuicOverrideTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetQuicOverrideTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_quic_override_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_quic_override_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetQuicOverrideTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_quic_override_request_resource"] = {'quic_override': 'quic_override_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_quic_override_unary(request) + + +def test_set_quic_override_unary_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_quic_override_request_resource=compute.TargetHttpsProxiesSetQuicOverrideRequest(quic_override='quic_override_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_quic_override_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setQuicOverride" % client.transport._host, args[1]) + + +def test_set_quic_override_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_quic_override_unary( + compute.SetQuicOverrideTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_quic_override_request_resource=compute.TargetHttpsProxiesSetQuicOverrideRequest(quic_override='quic_override_value'), + ) + + +def test_set_quic_override_unary_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslCertificatesTargetHttpsProxyRequest, + dict, +]) +def test_set_ssl_certificates_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_certificates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_ssl_certificates_rest_required_fields(request_type=compute.SetSslCertificatesTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_certificates(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_certificates_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_certificates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxiesSetSslCertificatesRequestResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_certificates_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_ssl_certificates") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_ssl_certificates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.SetSslCertificatesTargetHttpsProxyRequest.pb(compute.SetSslCertificatesTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslCertificatesTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_certificates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_certificates_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslCertificatesTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates(request) + + +def test_set_ssl_certificates_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_ssl_certificates_request_resource=compute.TargetHttpsProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_certificates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setSslCertificates" % client.transport._host, args[1]) + + +def test_set_ssl_certificates_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_ssl_certificates( + compute.SetSslCertificatesTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_ssl_certificates_request_resource=compute.TargetHttpsProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + + +def test_set_ssl_certificates_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslCertificatesTargetHttpsProxyRequest, + dict, +]) +def test_set_ssl_certificates_unary_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_certificates_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_ssl_certificates_unary_rest_required_fields(request_type=compute.SetSslCertificatesTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_certificates_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_certificates_unary_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_certificates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxiesSetSslCertificatesRequestResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_certificates_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_ssl_certificates") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_ssl_certificates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSslCertificatesTargetHttpsProxyRequest.pb(compute.SetSslCertificatesTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslCertificatesTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_certificates_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_certificates_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslCertificatesTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["target_https_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates_unary(request) + + +def test_set_ssl_certificates_unary_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_ssl_certificates_request_resource=compute.TargetHttpsProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_certificates_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setSslCertificates" % client.transport._host, args[1]) + + +def test_set_ssl_certificates_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_ssl_certificates_unary( + compute.SetSslCertificatesTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + target_https_proxies_set_ssl_certificates_request_resource=compute.TargetHttpsProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + + +def test_set_ssl_certificates_unary_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslPolicyTargetHttpsProxyRequest, + dict, +]) +def test_set_ssl_policy_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["ssl_policy_reference_resource"] = {'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_ssl_policy_rest_required_fields(request_type=compute.SetSslPolicyTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_policy_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicyReferenceResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_policy_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_ssl_policy") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_ssl_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSslPolicyTargetHttpsProxyRequest.pb(compute.SetSslPolicyTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslPolicyTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslPolicyTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["ssl_policy_reference_resource"] = {'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_policy(request) + + +def test_set_ssl_policy_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + ssl_policy_reference_resource=compute.SslPolicyReference(ssl_policy='ssl_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setSslPolicy" % client.transport._host, args[1]) + + +def test_set_ssl_policy_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_ssl_policy( + compute.SetSslPolicyTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + ssl_policy_reference_resource=compute.SslPolicyReference(ssl_policy='ssl_policy_value'), + ) + + +def test_set_ssl_policy_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslPolicyTargetHttpsProxyRequest, + dict, +]) +def test_set_ssl_policy_unary_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["ssl_policy_reference_resource"] = {'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_policy_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_ssl_policy_unary_rest_required_fields(request_type=compute.SetSslPolicyTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_policy_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_policy_unary_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicyReferenceResource", "targetHttpsProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_policy_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_ssl_policy") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_ssl_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSslPolicyTargetHttpsProxyRequest.pb(compute.SetSslPolicyTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslPolicyTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_policy_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_policy_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslPolicyTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["ssl_policy_reference_resource"] = {'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_policy_unary(request) + + +def test_set_ssl_policy_unary_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + ssl_policy_reference_resource=compute.SslPolicyReference(ssl_policy='ssl_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_policy_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setSslPolicy" % client.transport._host, args[1]) + + +def test_set_ssl_policy_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_ssl_policy_unary( + compute.SetSslPolicyTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + ssl_policy_reference_resource=compute.SslPolicyReference(ssl_policy='ssl_policy_value'), + ) + + +def test_set_ssl_policy_unary_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetUrlMapTargetHttpsProxyRequest, + dict, +]) +def test_set_url_map_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_url_map(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_url_map_rest_required_fields(request_type=compute.SetUrlMapTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_url_map(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_url_map_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_url_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxy", "urlMapReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_url_map_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_url_map") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_url_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUrlMapTargetHttpsProxyRequest.pb(compute.SetUrlMapTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUrlMapTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_url_map(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_url_map_rest_bad_request(transport: str = 'rest', request_type=compute.SetUrlMapTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map(request) + + +def test_set_url_map_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_url_map(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setUrlMap" % client.transport._host, args[1]) + + +def test_set_url_map_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_url_map( + compute.SetUrlMapTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + + +def test_set_url_map_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetUrlMapTargetHttpsProxyRequest, + dict, +]) +def test_set_url_map_unary_rest(request_type): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_url_map_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_url_map_unary_rest_required_fields(request_type=compute.SetUrlMapTargetHttpsProxyRequest): + transport_class = transports.TargetHttpsProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_https_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetHttpsProxy"] = 'target_https_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_url_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetHttpsProxy" in jsonified_request + assert jsonified_request["targetHttpsProxy"] == 'target_https_proxy_value' + + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_url_map_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_url_map_unary_rest_unset_required_fields(): + transport = transports.TargetHttpsProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_url_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetHttpsProxy", "urlMapReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_url_map_unary_rest_interceptors(null_interceptor): + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetHttpsProxiesRestInterceptor(), + ) + client = TargetHttpsProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "post_set_url_map") as post, \ + mock.patch.object(transports.TargetHttpsProxiesRestInterceptor, "pre_set_url_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetUrlMapTargetHttpsProxyRequest.pb(compute.SetUrlMapTargetHttpsProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetUrlMapTargetHttpsProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_url_map_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_url_map_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetUrlMapTargetHttpsProxyRequest): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_https_proxy': 'sample2'} + request_init["url_map_reference_resource"] = {'url_map': 'url_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map_unary(request) + + +def test_set_url_map_unary_rest_flattened(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_https_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_https_proxy='target_https_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_url_map_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setUrlMap" % client.transport._host, args[1]) + + +def test_set_url_map_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_url_map_unary( + compute.SetUrlMapTargetHttpsProxyRequest(), + project='project_value', + target_https_proxy='target_https_proxy_value', + url_map_reference_resource=compute.UrlMapReference(url_map='url_map_value'), + ) + + +def test_set_url_map_unary_rest_error(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetHttpsProxiesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetHttpsProxiesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetHttpsProxiesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetHttpsProxiesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TargetHttpsProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TargetHttpsProxiesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.TargetHttpsProxiesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = TargetHttpsProxiesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_target_https_proxies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TargetHttpsProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_target_https_proxies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.target_https_proxies.transports.TargetHttpsProxiesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TargetHttpsProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'patch', + 'set_certificate_map', + 'set_quic_override', + 'set_ssl_certificates', + 'set_ssl_policy', + 'set_url_map', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_target_https_proxies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.target_https_proxies.transports.TargetHttpsProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetHttpsProxiesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_target_https_proxies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.target_https_proxies.transports.TargetHttpsProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetHttpsProxiesTransport() + adc.assert_called_once() + + +def test_target_https_proxies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TargetHttpsProxiesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_target_https_proxies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.TargetHttpsProxiesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_https_proxies_host_no_port(transport_name): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_https_proxies_host_with_port(transport_name): + client = TargetHttpsProxiesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_https_proxies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TargetHttpsProxiesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TargetHttpsProxiesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_certificate_map._session + session2 = client2.transport.set_certificate_map._session + assert session1 != session2 + session1 = client1.transport.set_quic_override._session + session2 = client2.transport.set_quic_override._session + assert session1 != session2 + session1 = client1.transport.set_ssl_certificates._session + session2 = client2.transport.set_ssl_certificates._session + assert session1 != session2 + session1 = 
client1.transport.set_ssl_policy._session + session2 = client2.transport.set_ssl_policy._session + assert session1 != session2 + session1 = client1.transport.set_url_map._session + session2 = client2.transport.set_url_map._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TargetHttpsProxiesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TargetHttpsProxiesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TargetHttpsProxiesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TargetHttpsProxiesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TargetHttpsProxiesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TargetHttpsProxiesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TargetHttpsProxiesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TargetHttpsProxiesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetHttpsProxiesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TargetHttpsProxiesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TargetHttpsProxiesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TargetHttpsProxiesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TargetHttpsProxiesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TargetHttpsProxiesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetHttpsProxiesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TargetHttpsProxiesTransport, '_prep_wrapped_messages') as prep: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TargetHttpsProxiesTransport, '_prep_wrapped_messages') as prep: + transport_class = TargetHttpsProxiesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TargetHttpsProxiesClient, transports.TargetHttpsProxiesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_instances.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_instances.py new file mode 100644 index 000000000..e330cb5e3 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_instances.py @@ -0,0 +1,2824 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.target_instances import TargetInstancesClient +from google.cloud.compute_v1.services.target_instances import pagers +from google.cloud.compute_v1.services.target_instances import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TargetInstancesClient._get_default_mtls_endpoint(None) is None + assert TargetInstancesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TargetInstancesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TargetInstancesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TargetInstancesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TargetInstancesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetInstancesClient, "rest"), +]) +def test_target_instances_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TargetInstancesRestTransport, "rest"), +]) +def test_target_instances_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetInstancesClient, "rest"), +]) +def test_target_instances_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_target_instances_client_get_transport_class(): + transport = TargetInstancesClient.get_transport_class() + available_transports = [ + transports.TargetInstancesRestTransport, + ] + assert transport in available_transports + + transport = TargetInstancesClient.get_transport_class("rest") + assert transport == transports.TargetInstancesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetInstancesClient, transports.TargetInstancesRestTransport, "rest"), +]) +@mock.patch.object(TargetInstancesClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(TargetInstancesClient)) +def test_target_instances_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TargetInstancesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TargetInstancesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TargetInstancesClient, transports.TargetInstancesRestTransport, "rest", "true"), + (TargetInstancesClient, transports.TargetInstancesRestTransport, "rest", "false"), +]) +@mock.patch.object(TargetInstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetInstancesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_target_instances_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TargetInstancesClient +]) +@mock.patch.object(TargetInstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetInstancesClient)) +def test_target_instances_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetInstancesClient, transports.TargetInstancesRestTransport, "rest"), +]) +def test_target_instances_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TargetInstancesClient, transports.TargetInstancesRestTransport, "rest", None), +]) +def test_target_instances_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListTargetInstancesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetInstanceAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetInstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListTargetInstancesRequest): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetInstanceAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetInstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListTargetInstancesRequest.pb(compute.AggregatedListTargetInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetInstanceAggregatedList.to_json(compute.TargetInstanceAggregatedList()) + + request = compute.AggregatedListTargetInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetInstanceAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListTargetInstancesRequest): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetInstanceAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetInstanceAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/targetInstances" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListTargetInstancesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetInstanceAggregatedList( + items={ + 'a':compute.TargetInstancesScopedList(), + 'b':compute.TargetInstancesScopedList(), + 'c':compute.TargetInstancesScopedList(), + }, + next_page_token='abc', + ), + compute.TargetInstanceAggregatedList( + items={}, + next_page_token='def', + ), + compute.TargetInstanceAggregatedList( + items={ + 'g':compute.TargetInstancesScopedList(), + }, + next_page_token='ghi', + ), + compute.TargetInstanceAggregatedList( + items={ + 'h':compute.TargetInstancesScopedList(), + 'i':compute.TargetInstancesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetInstanceAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.TargetInstancesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.TargetInstancesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.TargetInstancesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetInstanceRequest, + dict, +]) +def 
test_delete_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteTargetInstanceRequest): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_instance"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] 
= 'project_value' + jsonified_request["targetInstance"] = 'target_instance_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetInstance" in jsonified_request + assert jsonified_request["targetInstance"] == 'target_instance_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetInstance", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetInstanceRequest.pb(compute.DeleteTargetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetInstanceRequest): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + target_instance='target_instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteTargetInstanceRequest(), + project='project_value', + zone='zone_value', + target_instance='target_instance_value', + ) + + +def test_delete_rest_error(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetInstanceRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = 
json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteTargetInstanceRequest): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_instance"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetInstance"] = 'target_instance_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetInstance" in jsonified_request + assert jsonified_request["targetInstance"] == 'target_instance_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetInstance", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetInstanceRequest.pb(compute.DeleteTargetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetInstanceRequest): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + target_instance='target_instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteTargetInstanceRequest(), + project='project_value', + zone='zone_value', + target_instance='target_instance_value', + ) + + +def test_delete_unary_rest_error(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetTargetInstanceRequest, + dict, +]) +def test_get_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetInstance( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + instance='instance_value', + kind='kind_value', + name='name_value', + nat_policy='nat_policy_value', + network='network_value', + self_link='self_link_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetInstance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetInstance) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.instance == 'instance_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.nat_policy == 'nat_policy_value' + assert response.network == 'network_value' + assert response.self_link == 'self_link_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetTargetInstanceRequest): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_instance"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetInstance"] = 'target_instance_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetInstance" in jsonified_request + assert jsonified_request["targetInstance"] == 'target_instance_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 
'zone_value' + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetInstance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetInstance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "targetInstance", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetTargetInstanceRequest.pb(compute.GetTargetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetInstance.to_json(compute.TargetInstance()) + + request = compute.GetTargetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetInstance() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetTargetInstanceRequest): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetInstance() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'target_instance': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + target_instance='target_instance_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetInstance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetTargetInstanceRequest(), + project='project_value', + zone='zone_value', + target_instance='target_instance_value', + ) + + +def test_get_rest_error(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetInstanceRequest, + dict, +]) +def test_insert_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["target_instance_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'instance': 'instance_value', 'kind': 'kind_value', 'name': 'name_value', 'nat_policy': 'nat_policy_value', 'network': 'network_value', 'self_link': 'self_link_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertTargetInstanceRequest): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetInstanceResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetInstanceRequest.pb(compute.InsertTargetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() 
+ req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetInstanceRequest): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["target_instance_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'instance': 'instance_value', 'kind': 'kind_value', 'name': 'name_value', 'nat_policy': 'nat_policy_value', 'network': 'network_value', 'self_link': 'self_link_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + target_instance_resource=compute.TargetInstance(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/targetInstances" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertTargetInstanceRequest(), + project='project_value', + zone='zone_value', + target_instance_resource=compute.TargetInstance(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetInstanceRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["target_instance_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'instance': 'instance_value', 'kind': 'kind_value', 'name': 'name_value', 'nat_policy': 'nat_policy_value', 'network': 'network_value', 'self_link': 'self_link_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertTargetInstanceRequest): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetInstanceResource", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.TargetInstancesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetInstanceRequest.pb(compute.InsertTargetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetInstanceRequest): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request_init["target_instance_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'instance': 'instance_value', 'kind': 'kind_value', 'name': 'name_value', 'nat_policy': 'nat_policy_value', 'network': 'network_value', 'self_link': 'self_link_value', 'zone': 'zone_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + target_instance_resource=compute.TargetInstance(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/targetInstances" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertTargetInstanceRequest(), + project='project_value', + zone='zone_value', + target_instance_resource=compute.TargetInstance(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListTargetInstancesRequest, + dict, +]) +def test_list_rest(request_type): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetInstanceList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetInstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListTargetInstancesRequest): + transport_class = transports.TargetInstancesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetInstanceList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetInstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.TargetInstancesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetInstancesRestInterceptor(), + ) + client = TargetInstancesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.TargetInstancesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListTargetInstancesRequest.pb(compute.ListTargetInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetInstanceList.to_json(compute.TargetInstanceList()) + + request = compute.ListTargetInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetInstanceList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListTargetInstancesRequest): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetInstanceList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetInstanceList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/targetInstances" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListTargetInstancesRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetInstanceList( + items=[ + compute.TargetInstance(), + compute.TargetInstance(), + compute.TargetInstance(), + ], + next_page_token='abc', + ), + compute.TargetInstanceList( + items=[], + next_page_token='def', + ), + compute.TargetInstanceList( + items=[ + compute.TargetInstance(), + ], + next_page_token='ghi', + ), + compute.TargetInstanceList( + items=[ + compute.TargetInstance(), + compute.TargetInstance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetInstanceList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetInstance) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetInstancesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetInstancesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetInstancesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetInstancesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TargetInstancesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TargetInstancesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.TargetInstancesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = TargetInstancesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_target_instances_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TargetInstancesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_target_instances_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.target_instances.transports.TargetInstancesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TargetInstancesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_target_instances_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.target_instances.transports.TargetInstancesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetInstancesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_target_instances_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.target_instances.transports.TargetInstancesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetInstancesTransport() + adc.assert_called_once() + + +def test_target_instances_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TargetInstancesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_target_instances_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.TargetInstancesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_instances_host_no_port(transport_name): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_instances_host_with_port(transport_name): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_instances_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 
= ga_credentials.AnonymousCredentials() + client1 = TargetInstancesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TargetInstancesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TargetInstancesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TargetInstancesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TargetInstancesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TargetInstancesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TargetInstancesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetInstancesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TargetInstancesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TargetInstancesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TargetInstancesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TargetInstancesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TargetInstancesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TargetInstancesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TargetInstancesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TargetInstancesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetInstancesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TargetInstancesTransport, '_prep_wrapped_messages') as prep: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TargetInstancesTransport, '_prep_wrapped_messages') as prep: + transport_class = TargetInstancesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TargetInstancesClient, transports.TargetInstancesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_pools.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_pools.py new file mode 100644 index 000000000..b559c21bb --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_pools.py @@ -0,0 +1,5921 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.target_pools import TargetPoolsClient +from google.cloud.compute_v1.services.target_pools import pagers +from google.cloud.compute_v1.services.target_pools import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TargetPoolsClient._get_default_mtls_endpoint(None) is None + assert TargetPoolsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TargetPoolsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TargetPoolsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TargetPoolsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TargetPoolsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetPoolsClient, "rest"), +]) +def test_target_pools_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TargetPoolsRestTransport, "rest"), +]) +def test_target_pools_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', 
create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetPoolsClient, "rest"), +]) +def test_target_pools_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_target_pools_client_get_transport_class(): + transport = TargetPoolsClient.get_transport_class() + available_transports = [ + transports.TargetPoolsRestTransport, + ] + assert transport in available_transports + + transport = TargetPoolsClient.get_transport_class("rest") + assert transport == transports.TargetPoolsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetPoolsClient, transports.TargetPoolsRestTransport, "rest"), +]) +@mock.patch.object(TargetPoolsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetPoolsClient)) +def 
test_target_pools_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TargetPoolsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TargetPoolsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TargetPoolsClient, transports.TargetPoolsRestTransport, "rest", "true"), + (TargetPoolsClient, transports.TargetPoolsRestTransport, "rest", "false"), +]) +@mock.patch.object(TargetPoolsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetPoolsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_target_pools_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TargetPoolsClient +]) +@mock.patch.object(TargetPoolsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetPoolsClient)) +def test_target_pools_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetPoolsClient, transports.TargetPoolsRestTransport, "rest"), +]) +def test_target_pools_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TargetPoolsClient, transports.TargetPoolsRestTransport, "rest", None), +]) +def test_target_pools_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddHealthCheckTargetPoolRequest, + dict, +]) +def test_add_health_check_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_add_health_check_request_resource"] = {'health_checks': [{'health_check': 'health_check_value'}]} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_health_check(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_health_check_rest_required_fields(request_type=compute.AddHealthCheckTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_health_check._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_health_check._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_health_check(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_health_check_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_health_check._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", "targetPoolsAddHealthCheckRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_health_check_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_add_health_check") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_add_health_check") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddHealthCheckTargetPoolRequest.pb(compute.AddHealthCheckTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddHealthCheckTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_health_check(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_health_check_rest_bad_request(transport: str = 'rest', request_type=compute.AddHealthCheckTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_add_health_check_request_resource"] = {'health_checks': [{'health_check': 'health_check_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_health_check(request) + + +def test_add_health_check_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_add_health_check_request_resource=compute.TargetPoolsAddHealthCheckRequest(health_checks=[compute.HealthCheckReference(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_health_check(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addHealthCheck" % client.transport._host, args[1]) + + +def test_add_health_check_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_health_check( + compute.AddHealthCheckTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_add_health_check_request_resource=compute.TargetPoolsAddHealthCheckRequest(health_checks=[compute.HealthCheckReference(health_check='health_check_value')]), + ) + + +def test_add_health_check_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddHealthCheckTargetPoolRequest, + dict, +]) +def test_add_health_check_unary_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_add_health_check_request_resource"] = {'health_checks': [{'health_check': 'health_check_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_health_check_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_health_check_unary_rest_required_fields(request_type=compute.AddHealthCheckTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_health_check._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_health_check._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_health_check_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_health_check_unary_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_health_check._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", "targetPoolsAddHealthCheckRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_health_check_unary_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_add_health_check") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_add_health_check") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddHealthCheckTargetPoolRequest.pb(compute.AddHealthCheckTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddHealthCheckTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_health_check_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_health_check_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddHealthCheckTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_add_health_check_request_resource"] = {'health_checks': [{'health_check': 'health_check_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_health_check_unary(request) + + +def test_add_health_check_unary_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_add_health_check_request_resource=compute.TargetPoolsAddHealthCheckRequest(health_checks=[compute.HealthCheckReference(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_health_check_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addHealthCheck" % client.transport._host, args[1]) + + +def test_add_health_check_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_health_check_unary( + compute.AddHealthCheckTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_add_health_check_request_resource=compute.TargetPoolsAddHealthCheckRequest(health_checks=[compute.HealthCheckReference(health_check='health_check_value')]), + ) + + +def test_add_health_check_unary_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddInstanceTargetPoolRequest, + dict, +]) +def test_add_instance_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_add_instance_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_add_instance_rest_required_fields(request_type=compute.AddInstanceTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_instance_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", "targetPoolsAddInstanceRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_instance_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_add_instance") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_add_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddInstanceTargetPoolRequest.pb(compute.AddInstanceTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddInstanceTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_instance_rest_bad_request(transport: str = 'rest', request_type=compute.AddInstanceTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_add_instance_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_instance(request) + + +def test_add_instance_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_add_instance_request_resource=compute.TargetPoolsAddInstanceRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addInstance" % client.transport._host, args[1]) + + +def test_add_instance_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.add_instance( + compute.AddInstanceTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_add_instance_request_resource=compute.TargetPoolsAddInstanceRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + + +def test_add_instance_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AddInstanceTargetPoolRequest, + dict, +]) +def test_add_instance_unary_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_add_instance_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.add_instance_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_add_instance_unary_rest_required_fields(request_type=compute.AddInstanceTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.add_instance_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_instance_unary_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", "targetPoolsAddInstanceRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_instance_unary_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_add_instance") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_add_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AddInstanceTargetPoolRequest.pb(compute.AddInstanceTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.AddInstanceTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.add_instance_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_add_instance_unary_rest_bad_request(transport: str = 'rest', request_type=compute.AddInstanceTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_add_instance_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_instance_unary(request) + + +def test_add_instance_unary_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_add_instance_request_resource=compute.TargetPoolsAddInstanceRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.add_instance_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addInstance" % client.transport._host, args[1]) + + +def test_add_instance_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_instance_unary( + compute.AddInstanceTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_add_instance_request_resource=compute.TargetPoolsAddInstanceRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + + +def test_add_instance_unary_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListTargetPoolsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetPoolAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetPoolAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListTargetPoolsRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetPoolAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetPoolAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListTargetPoolsRequest.pb(compute.AggregatedListTargetPoolsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetPoolAggregatedList.to_json(compute.TargetPoolAggregatedList()) + + request = compute.AggregatedListTargetPoolsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetPoolAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListTargetPoolsRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetPoolAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetPoolAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/targetPools" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListTargetPoolsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetPoolAggregatedList( + items={ + 'a':compute.TargetPoolsScopedList(), + 'b':compute.TargetPoolsScopedList(), + 'c':compute.TargetPoolsScopedList(), + }, + next_page_token='abc', + ), + compute.TargetPoolAggregatedList( + items={}, + next_page_token='def', + ), + compute.TargetPoolAggregatedList( + items={ + 'g':compute.TargetPoolsScopedList(), + }, + next_page_token='ghi', + ), + compute.TargetPoolAggregatedList( + items={ + 'h':compute.TargetPoolsScopedList(), + 'i':compute.TargetPoolsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetPoolAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.TargetPoolsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.TargetPoolsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.TargetPoolsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetPoolRequest, + dict, +]) +def test_delete_rest(request_type): + client = TargetPoolsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetPoolRequest.pb(compute.DeleteTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + ) + + +def test_delete_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetPoolRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetPoolRequest.pb(compute.DeleteTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + ) + + +def test_delete_unary_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetTargetPoolRequest, + dict, +]) +def test_get_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetPool( + backup_pool='backup_pool_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + failover_ratio=0.1494, + health_checks=['health_checks_value'], + id=205, + instances=['instances_value'], + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + session_affinity='session_affinity_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetPool) + assert response.backup_pool == 'backup_pool_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert math.isclose(response.failover_ratio, 0.1494, rel_tol=1e-6) + assert response.health_checks == ['health_checks_value'] + assert response.id == 205 + assert response.instances == ['instances_value'] + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.session_affinity == 'session_affinity_value' + + +def test_get_rest_required_fields(request_type=compute.GetTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert 
jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetPool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "targetPool", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetTargetPoolRequest.pb(compute.GetTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetPool.to_json(compute.TargetPool()) + + request = compute.GetTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetPool() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetPool() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + ) + + +def test_get_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetHealthTargetPoolRequest, + dict, +]) +def test_get_health_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["instance_reference_resource"] = {'instance': 'instance_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetPoolInstanceHealth( + kind='kind_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetPoolInstanceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_health(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetPoolInstanceHealth) + assert response.kind == 'kind_value' + + +def test_get_health_rest_required_fields(request_type=compute.GetHealthTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetPoolInstanceHealth() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetPoolInstanceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_health(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_health_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_health._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceReferenceResource", "project", "region", "targetPool", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_health_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.TargetPoolsRestInterceptor, "post_get_health") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_get_health") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetHealthTargetPoolRequest.pb(compute.GetHealthTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetPoolInstanceHealth.to_json(compute.TargetPoolInstanceHealth()) + + request = compute.GetHealthTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetPoolInstanceHealth() + + client.get_health(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_health_rest_bad_request(transport: str = 'rest', request_type=compute.GetHealthTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["instance_reference_resource"] = {'instance': 'instance_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_health(request) + + +def test_get_health_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetPoolInstanceHealth() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + instance_reference_resource=compute.InstanceReference(instance='instance_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetPoolInstanceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_health(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/getHealth" % client.transport._host, args[1]) + + +def test_get_health_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_health( + compute.GetHealthTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + instance_reference_resource=compute.InstanceReference(instance='instance_value'), + ) + + +def test_get_health_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetPoolRequest, + dict, +]) +def test_insert_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_pool_resource"] = {'backup_pool': 'backup_pool_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'failover_ratio': 0.1494, 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'id': 205, 'instances': ['instances_value1', 'instances_value2'], 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'session_affinity': 'session_affinity_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPoolResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetPoolRequest.pb(compute.InsertTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_pool_resource"] = {'backup_pool': 'backup_pool_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'failover_ratio': 0.1494, 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'id': 205, 'instances': ['instances_value1', 'instances_value2'], 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'session_affinity': 'session_affinity_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool_resource=compute.TargetPool(backup_pool='backup_pool_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool_resource=compute.TargetPool(backup_pool='backup_pool_value'), + ) + + +def test_insert_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetPoolRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_pool_resource"] = {'backup_pool': 'backup_pool_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'failover_ratio': 0.1494, 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'id': 205, 'instances': ['instances_value1', 'instances_value2'], 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'session_affinity': 'session_affinity_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPoolResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, 
"post_insert") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetPoolRequest.pb(compute.InsertTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_pool_resource"] = {'backup_pool': 'backup_pool_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'failover_ratio': 0.1494, 'health_checks': ['health_checks_value1', 'health_checks_value2'], 'id': 205, 'instances': ['instances_value1', 'instances_value2'], 'kind': 'kind_value', 'name': 'name_value', 'region': 'region_value', 'self_link': 'self_link_value', 'session_affinity': 'session_affinity_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool_resource=compute.TargetPool(backup_pool='backup_pool_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool_resource=compute.TargetPool(backup_pool='backup_pool_value'), + ) + + +def test_insert_unary_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListTargetPoolsRequest, + dict, +]) +def test_list_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetPoolList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetPoolList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListTargetPoolsRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetPoolList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetPoolList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListTargetPoolsRequest.pb(compute.ListTargetPoolsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetPoolList.to_json(compute.TargetPoolList()) + + request = compute.ListTargetPoolsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetPoolList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListTargetPoolsRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetPoolList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetPoolList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListTargetPoolsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetPoolList( + items=[ + compute.TargetPool(), + compute.TargetPool(), + compute.TargetPool(), + ], + next_page_token='abc', + ), + compute.TargetPoolList( + items=[], + next_page_token='def', + ), + compute.TargetPoolList( + items=[ + compute.TargetPool(), + ], + next_page_token='ghi', + ), + compute.TargetPoolList( + items=[ + compute.TargetPool(), + compute.TargetPool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetPoolList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetPool) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveHealthCheckTargetPoolRequest, + dict, +]) +def test_remove_health_check_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_remove_health_check_request_resource"] = {'health_checks': [{'health_check': 'health_check_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_health_check(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_health_check_rest_required_fields(request_type=compute.RemoveHealthCheckTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_health_check._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_health_check._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_health_check(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_health_check_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_health_check._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", "targetPoolsRemoveHealthCheckRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_health_check_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_remove_health_check") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_remove_health_check") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveHealthCheckTargetPoolRequest.pb(compute.RemoveHealthCheckTargetPoolRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveHealthCheckTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_health_check(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_health_check_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveHealthCheckTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_remove_health_check_request_resource"] = {'health_checks': [{'health_check': 'health_check_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_health_check(request) + + +def test_remove_health_check_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_remove_health_check_request_resource=compute.TargetPoolsRemoveHealthCheckRequest(health_checks=[compute.HealthCheckReference(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_health_check(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeHealthCheck" % client.transport._host, args[1]) + + +def test_remove_health_check_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_health_check( + compute.RemoveHealthCheckTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_remove_health_check_request_resource=compute.TargetPoolsRemoveHealthCheckRequest(health_checks=[compute.HealthCheckReference(health_check='health_check_value')]), + ) + + +def test_remove_health_check_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveHealthCheckTargetPoolRequest, + dict, +]) +def test_remove_health_check_unary_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_remove_health_check_request_resource"] = {'health_checks': [{'health_check': 'health_check_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_health_check_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_health_check_unary_rest_required_fields(request_type=compute.RemoveHealthCheckTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_health_check._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_health_check._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_health_check_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_health_check_unary_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_health_check._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", "targetPoolsRemoveHealthCheckRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_health_check_unary_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_remove_health_check") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_remove_health_check") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveHealthCheckTargetPoolRequest.pb(compute.RemoveHealthCheckTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveHealthCheckTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_health_check_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_health_check_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveHealthCheckTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_remove_health_check_request_resource"] = {'health_checks': [{'health_check': 'health_check_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_health_check_unary(request) + + +def test_remove_health_check_unary_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_remove_health_check_request_resource=compute.TargetPoolsRemoveHealthCheckRequest(health_checks=[compute.HealthCheckReference(health_check='health_check_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_health_check_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeHealthCheck" % client.transport._host, args[1]) + + +def test_remove_health_check_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_health_check_unary( + compute.RemoveHealthCheckTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_remove_health_check_request_resource=compute.TargetPoolsRemoveHealthCheckRequest(health_checks=[compute.HealthCheckReference(health_check='health_check_value')]), + ) + + +def test_remove_health_check_unary_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveInstanceTargetPoolRequest, + dict, +]) +def test_remove_instance_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_remove_instance_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_remove_instance_rest_required_fields(request_type=compute.RemoveInstanceTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_instance_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", "targetPoolsRemoveInstanceRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_instance_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_remove_instance") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_remove_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveInstanceTargetPoolRequest.pb(compute.RemoveInstanceTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveInstanceTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_instance_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveInstanceTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_remove_instance_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_instance(request) + + +def test_remove_instance_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_remove_instance_request_resource=compute.TargetPoolsRemoveInstanceRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeInstance" % client.transport._host, args[1]) + + +def test_remove_instance_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.remove_instance( + compute.RemoveInstanceTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_remove_instance_request_resource=compute.TargetPoolsRemoveInstanceRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + + +def test_remove_instance_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.RemoveInstanceTargetPoolRequest, + dict, +]) +def test_remove_instance_unary_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_remove_instance_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.remove_instance_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_remove_instance_unary_rest_required_fields(request_type=compute.RemoveInstanceTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).remove_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.remove_instance_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_remove_instance_unary_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.remove_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetPool", "targetPoolsRemoveInstanceRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_remove_instance_unary_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_remove_instance") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_remove_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.RemoveInstanceTargetPoolRequest.pb(compute.RemoveInstanceTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.RemoveInstanceTargetPoolRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.remove_instance_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_remove_instance_unary_rest_bad_request(transport: str = 'rest', request_type=compute.RemoveInstanceTargetPoolRequest): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_pools_remove_instance_request_resource"] = {'instances': [{'instance': 'instance_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_instance_unary(request) + + +def test_remove_instance_unary_rest_flattened(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_remove_instance_request_resource=compute.TargetPoolsRemoveInstanceRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.remove_instance_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeInstance" % client.transport._host, args[1]) + + +def test_remove_instance_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_instance_unary( + compute.RemoveInstanceTargetPoolRequest(), + project='project_value', + region='region_value', + target_pool='target_pool_value', + target_pools_remove_instance_request_resource=compute.TargetPoolsRemoveInstanceRequest(instances=[compute.InstanceReference(instance='instance_value')]), + ) + + +def test_remove_instance_unary_rest_error(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetBackupTargetPoolRequest, + dict, +]) +def test_set_backup_rest(request_type): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'} + request_init["target_reference_resource"] = {'target': 'target_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_backup(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_backup_rest_required_fields(request_type=compute.SetBackupTargetPoolRequest): + transport_class = transports.TargetPoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_pool"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] 
= 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetPool"] = 'target_pool_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("failover_ratio", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetPool" in jsonified_request + assert jsonified_request["targetPool"] == 'target_pool_value' + + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_backup(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_backup_rest_unset_required_fields(): + transport = transports.TargetPoolsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("failoverRatio", "requestId", )) & set(("project", "region", "targetPool", "targetReferenceResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_backup_rest_interceptors(null_interceptor): + transport = transports.TargetPoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(), + ) + client = TargetPoolsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "post_set_backup") as post, \ + mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_set_backup") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetBackupTargetPoolRequest.pb(compute.SetBackupTargetPoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + 
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_set_backup_rest_interceptors(null_interceptor):
    """set_backup must call the pre/post REST interceptor hooks exactly once."""
    transport = transports.TargetPoolsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(),
    )
    client = TargetPoolsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.TargetPoolsRestInterceptor, "post_set_backup") as post, \
         mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_set_backup") as pre:
        # Neither hook may fire before the RPC is issued.
        pre.assert_not_called()
        post.assert_not_called()

        pb_message = compute.SetBackupTargetPoolRequest.pb(compute.SetBackupTargetPoolRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        fake_response = Response()
        fake_response.status_code = 200
        fake_response.request = PreparedRequest()
        fake_response._content = compute.Operation.to_json(compute.Operation())
        req.return_value = fake_response

        request = compute.SetBackupTargetPoolRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.set_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        # Each hook fires exactly once per call.
        pre.assert_called_once()
        post.assert_called_once()


def test_set_backup_rest_bad_request(transport: str = 'rest', request_type=compute.SetBackupTargetPoolRequest):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request that satisfies URI transcoding.
    request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'}
    request_init["target_reference_resource"] = {'target': 'target_value'}
    request = request_type(**request_init)

    # Fake the HTTP call and return a 400; the client must raise.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.set_backup(request)


def test_set_backup_rest_flattened():
    """Flattened arguments must be merged into the request and hit the right URI."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), 'request') as req:
        return_value = compute.Operation()

        # Arguments that satisfy the http rule for this method.
        sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'}

        # A truthy value for every flattened field.
        mock_args = dict(
            project='project_value',
            region='region_value',
            target_pool='target_pool_value',
            target_reference_resource=compute.TargetReference(target='target_value'),
        )
        mock_args.update(sample_request)

        ok_response = Response()
        ok_response.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        ok_response._content = json_format.MessageToJson(pb_return_value).encode('UTF-8')
        req.return_value = ok_response

        client.set_backup(**mock_args)

        # Exactly one underlying HTTP call, against the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setBackup" % client.transport._host, args[1])


def test_set_backup_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields is a ValueError."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    with pytest.raises(ValueError):
        client.set_backup(
            compute.SetBackupTargetPoolRequest(),
            project='project_value',
            region='region_value',
            target_pool='target_pool_value',
            target_reference_resource=compute.TargetReference(target='target_value'),
        )


def test_set_backup_rest_error():
    """Client construction with the REST transport must not raise."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )
@pytest.mark.parametrize("request_type", [
    compute.SetBackupTargetPoolRequest,
    dict,
])
def test_set_backup_unary_rest(request_type):
    """Happy path: a mocked 200 response decodes into a compute.Operation."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Request that satisfies URI transcoding.
    request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'}
    request_init["target_reference_resource"] = {'target': 'target_value'}
    request = request_type(**request_init)

    with mock.patch.object(type(client.transport._session), 'request') as req:
        # A fully populated Operation so every field round-trips through JSON.
        return_value = compute.Operation(
            client_operation_id='client_operation_id_value',
            creation_timestamp='creation_timestamp_value',
            description='description_value',
            end_time='end_time_value',
            http_error_message='http_error_message_value',
            http_error_status_code=2374,
            id=205,
            insert_time='insert_time_value',
            kind='kind_value',
            name='name_value',
            operation_group_id='operation_group_id_value',
            operation_type='operation_type_value',
            progress=885,
            region='region_value',
            self_link='self_link_value',
            start_time='start_time_value',
            status=compute.Operation.Status.DONE,
            status_message='status_message_value',
            target_id=947,
            target_link='target_link_value',
            user='user_value',
            zone='zone_value',
        )

        ok_response = Response()
        ok_response.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        ok_response._content = json_format.MessageToJson(pb_return_value).encode('UTF-8')
        req.return_value = ok_response
        response = client.set_backup_unary(request)

    # The unary surface returns the raw Operation type.
    assert isinstance(response, compute.Operation)


def test_set_backup_unary_rest_required_fields(request_type=compute.SetBackupTargetPoolRequest):
    """Required fields must survive the default-value-dropping JSON round trip."""
    transport_class = transports.TargetPoolsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["region"] = ""
    request_init["target_pool"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # Default-valued fields should have been dropped from the JSON form.
    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backup._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # Re-populate the required fields with non-default values.
    jsonified_request["project"] = 'project_value'
    jsonified_request["region"] = 'region_value'
    jsonified_request["targetPool"] = 'target_pool_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backup._get_unset_required_fields(jsonified_request)
    # Path parameters and body parameters must not mix.
    assert not set(unset_fields) - set(("failover_ratio", "request_id", ))
    jsonified_request.update(unset_fields)

    # Non-default required values must be left alone.
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "region" in jsonified_request
    assert jsonified_request["region"] == 'region_value'
    assert "targetPool" in jsonified_request
    assert jsonified_request["targetPool"] == 'target_pool_value'

    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    return_value = compute.Operation()
    with mock.patch.object(Session, 'request') as req:
        # transcode() is mocked because the real one rejects default-valued
        # required path fields; a field-less URI forces everything into
        # query_params.
        with mock.patch.object(path_template, 'transcode') as transcode:
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            ok_response = Response()
            ok_response.status_code = 200
            pb_return_value = compute.Operation.pb(return_value)
            ok_response._content = json_format.MessageToJson(pb_return_value).encode('UTF-8')
            req.return_value = ok_response

            response = client.set_backup_unary(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
def test_set_backup_unary_rest_unset_required_fields():
    """Verify set_backup's optional query params never overlap its required fields.

    Mirrors ``test_set_backup_rest_unset_required_fields`` for the unary
    surface; the set intersection asserted below must be empty.
    """
    # BUG FIX: the generated code passed the AnonymousCredentials *class*
    # (``ga_credentials.AnonymousCredentials``) instead of an instance;
    # instantiate it so the transport receives a real credentials object.
    transport = transports.TargetPoolsRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_backup._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("failoverRatio", "requestId", ))
        & set(("project", "region", "targetPool", "targetReferenceResource", ))
    )
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_set_backup_unary_rest_interceptors(null_interceptor):
    """set_backup_unary must call the pre/post REST interceptor hooks once each."""
    transport = transports.TargetPoolsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.TargetPoolsRestInterceptor(),
    )
    client = TargetPoolsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.TargetPoolsRestInterceptor, "post_set_backup") as post, \
         mock.patch.object(transports.TargetPoolsRestInterceptor, "pre_set_backup") as pre:
        # Neither hook may fire before the RPC is issued.
        pre.assert_not_called()
        post.assert_not_called()

        pb_message = compute.SetBackupTargetPoolRequest.pb(compute.SetBackupTargetPoolRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        fake_response = Response()
        fake_response.status_code = 200
        fake_response.request = PreparedRequest()
        fake_response._content = compute.Operation.to_json(compute.Operation())
        req.return_value = fake_response

        request = compute.SetBackupTargetPoolRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.set_backup_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_set_backup_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetBackupTargetPoolRequest):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request that satisfies URI transcoding.
    request_init = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'}
    request_init["target_reference_resource"] = {'target': 'target_value'}
    request = request_type(**request_init)

    # Fake the HTTP call and return a 400; the client must raise.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.set_backup_unary(request)


def test_set_backup_unary_rest_flattened():
    """Flattened arguments must be merged into the request and hit the right URI."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), 'request') as req:
        return_value = compute.Operation()

        # Arguments that satisfy the http rule for this method.
        sample_request = {'project': 'sample1', 'region': 'sample2', 'target_pool': 'sample3'}

        # A truthy value for every flattened field.
        mock_args = dict(
            project='project_value',
            region='region_value',
            target_pool='target_pool_value',
            target_reference_resource=compute.TargetReference(target='target_value'),
        )
        mock_args.update(sample_request)

        ok_response = Response()
        ok_response.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        ok_response._content = json_format.MessageToJson(pb_return_value).encode('UTF-8')
        req.return_value = ok_response

        client.set_backup_unary(**mock_args)

        # Exactly one underlying HTTP call, against the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setBackup" % client.transport._host, args[1])


def test_set_backup_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields is a ValueError."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    with pytest.raises(ValueError):
        client.set_backup_unary(
            compute.SetBackupTargetPoolRequest(),
            project='project_value',
            region='region_value',
            target_pool='target_pool_value',
            target_reference_resource=compute.TargetReference(target='target_value'),
        )


def test_set_backup_unary_rest_error():
    """Client construction with the REST transport must not raise."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )
def test_credentials_transport_error():
    """Mutually exclusive client-construction options must raise ValueError."""
    # Credentials plus a transport instance is an error.
    transport = transports.TargetPoolsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = TargetPoolsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # A credentials file plus a transport instance is an error.
    transport = transports.TargetPoolsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = TargetPoolsClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # An api_key plus a transport instance is an error.
    transport = transports.TargetPoolsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = TargetPoolsClient(
            client_options=options,
            transport=transport,
        )

    # An api_key plus credentials is an error.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = TargetPoolsClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # Scopes plus a transport instance is an error.
    transport = transports.TargetPoolsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = TargetPoolsClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    """A client may be built around a caller-supplied transport instance."""
    transport = transports.TargetPoolsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = TargetPoolsClient(transport=transport)
    assert client.transport is transport


@pytest.mark.parametrize("transport_class", [
    transports.TargetPoolsRestTransport,
])
def test_transport_adc(transport_class):
    """Application Default Credentials are used when none are supplied."""
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """transport.kind must echo the name it was selected by."""
    transport = TargetPoolsClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_target_pools_base_transport_error():
    """Credentials object plus credentials_file must raise DuplicateCredentialArgs."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.TargetPoolsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )
def test_target_pools_base_transport():
    """Every RPC on the abstract base transport must raise NotImplementedError."""
    with mock.patch('google.cloud.compute_v1.services.target_pools.transports.TargetPoolsTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.TargetPoolsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Each stub method should blindly raise.
    methods = (
        'add_health_check',
        'add_instance',
        'aggregated_list',
        'delete',
        'get',
        'get_health',
        'insert',
        'list',
        'remove_health_check',
        'remove_instance',
        'set_backup',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch-all for remaining abstract properties.
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_target_pools_base_transport_with_credentials_file():
    """A credentials file is loaded with the transport's default scopes."""
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.target_pools.transports.TargetPoolsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.TargetPoolsTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/compute',
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id="octopus",
        )


def test_target_pools_base_transport_with_adc():
    """ADC is consulted when neither credentials nor a file is given."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.target_pools.transports.TargetPoolsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.TargetPoolsTransport()
        adc.assert_called_once()


def test_target_pools_auth_adc():
    """Client construction with no credentials falls back to ADC with default scopes."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        TargetPoolsClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/compute',
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id=None,
        )


def test_target_pools_http_transport_client_cert_source_for_mtls():
    """The mTLS client-cert callback must be wired into the HTTP session."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.TargetPoolsRestTransport (
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_target_pools_host_no_port(transport_name):
    """Default API endpoint without a port resolves to the https host."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_target_pools_host_with_port(transport_name):
    """An explicit port on the API endpoint must be preserved."""
    client = TargetPoolsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com:8000'
    )
@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_target_pools_client_transport_session_collision(transport_name):
    """Two clients must never share an HTTP session for any RPC."""
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = TargetPoolsClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = TargetPoolsClient(
        credentials=creds2,
        transport=transport_name,
    )
    # Same assertion for every RPC surface: the per-client sessions differ.
    for rpc in (
        "add_health_check",
        "add_instance",
        "aggregated_list",
        "delete",
        "get",
        "get_health",
        "insert",
        "list",
        "remove_health_check",
        "remove_instance",
        "set_backup",
    ):
        session1 = getattr(client1.transport, rpc)._session
        session2 = getattr(client2.transport, rpc)._session
        assert session1 != session2

def test_common_billing_account_path():
    """Billing-account path helper renders the expected resource name."""
    billing_account = "squid"
    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
    actual = TargetPoolsClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    """Billing-account path construction must be reversible."""
    expected = {
        "billing_account": "clam",
    }
    path = TargetPoolsClient.common_billing_account_path(**expected)
    actual = TargetPoolsClient.parse_common_billing_account_path(path)
    assert expected == actual

def test_common_folder_path():
    """Folder path helper renders the expected resource name."""
    folder = "whelk"
    expected = "folders/{folder}".format(folder=folder, )
    actual = TargetPoolsClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    """Folder path construction must be reversible."""
    expected = {
        "folder": "octopus",
    }
    path = TargetPoolsClient.common_folder_path(**expected)
    actual = TargetPoolsClient.parse_common_folder_path(path)
    assert expected == actual

def test_common_organization_path():
    """Organization path helper renders the expected resource name."""
    organization = "oyster"
    expected = "organizations/{organization}".format(organization=organization, )
    actual = TargetPoolsClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    """Organization path construction must be reversible."""
    expected = {
        "organization": "nudibranch",
    }
    path = TargetPoolsClient.common_organization_path(**expected)
    actual = TargetPoolsClient.parse_common_organization_path(path)
    assert expected == actual

def test_common_project_path():
    """Project path helper renders the expected resource name."""
    project = "cuttlefish"
    expected = "projects/{project}".format(project=project, )
    actual = TargetPoolsClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    """Project path construction must be reversible."""
    expected = {
        "project": "mussel",
    }
    path = TargetPoolsClient.common_project_path(**expected)
    actual = TargetPoolsClient.parse_common_project_path(path)
    assert expected == actual

def test_common_location_path():
    """Location path helper renders the expected resource name."""
    project = "winkle"
    location = "nautilus"
    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
    actual = TargetPoolsClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    """Location path construction must be reversible."""
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = TargetPoolsClient.common_location_path(**expected)
    actual = TargetPoolsClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    """A caller-supplied ClientInfo must reach _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.TargetPoolsTransport, '_prep_wrapped_messages') as prep:
        client = TargetPoolsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.TargetPoolsTransport, '_prep_wrapped_messages') as prep:
        transport_class = TargetPoolsClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    """Exiting the client context must close the underlying session exactly once."""
    transports = {
        "rest": "_session",
    }

    for transport, close_name in transports.items():
        client = TargetPoolsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """Using the client as a context manager must close its transport on exit."""
    transports = [
        'rest',
    ]
    for transport in transports:
        client = TargetPoolsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()

@pytest.mark.parametrize("client_class,transport_class", [
    (TargetPoolsClient, transports.TargetPoolsRestTransport),
])
def test_api_key_credentials(client_class, transport_class):
    """An api_key client option must be exchanged for API-key credentials."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # The transport must be constructed with the exchanged credentials.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.target_ssl_proxies import TargetSslProxiesClient +from google.cloud.compute_v1.services.target_ssl_proxies import pagers +from google.cloud.compute_v1.services.target_ssl_proxies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint(client):
    """Return a non-localhost endpoint so mTLS endpoint derivation is testable."""
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps googleapis hosts to their mtls twins."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # None passes through; mtls endpoints are idempotent; non-Google hosts
    # are left untouched.
    assert TargetSslProxiesClient._get_default_mtls_endpoint(None) is None
    assert TargetSslProxiesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert TargetSslProxiesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert TargetSslProxiesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert TargetSslProxiesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert TargetSslProxiesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (TargetSslProxiesClient, "rest"),
])
def test_target_ssl_proxies_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info must hand the parsed credentials to the client."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.TargetSslProxiesRestTransport, "rest"),
])
def test_target_ssl_proxies_client_service_account_always_use_jwt(transport_class, transport_name):
    """always_use_jwt_access toggles with_always_use_jwt_access on the credentials."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (TargetSslProxiesClient, "rest"),
])
def test_target_ssl_proxies_client_from_service_account_file(client_class, transport_name):
    """Both the _file and _json factories must install the loaded credentials."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_target_ssl_proxies_client_get_transport_class():
    """get_transport_class resolves both the default and the named transport."""
    transport = TargetSslProxiesClient.get_transport_class()
    available_transports = [
        transports.TargetSslProxiesRestTransport,
    ]
    assert transport in available_transports

    transport = TargetSslProxiesClient.get_transport_class("rest")
    assert transport == transports.TargetSslProxiesRestTransport
+@mock.patch.object(TargetSslProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetSslProxiesClient)) +def test_target_ssl_proxies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TargetSslProxiesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TargetSslProxiesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TargetSslProxiesClient, transports.TargetSslProxiesRestTransport, "rest", "true"), + (TargetSslProxiesClient, transports.TargetSslProxiesRestTransport, "rest", "false"), +]) +@mock.patch.object(TargetSslProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetSslProxiesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_target_ssl_proxies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TargetSslProxiesClient +]) +@mock.patch.object(TargetSslProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetSslProxiesClient)) +def test_target_ssl_proxies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetSslProxiesClient, transports.TargetSslProxiesRestTransport, "rest"), +]) +def test_target_ssl_proxies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TargetSslProxiesClient, transports.TargetSslProxiesRestTransport, "rest", None), +]) +def test_target_ssl_proxies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetSslProxyRequest, + dict, +]) +def test_delete_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetSslProxyRequest.pb(compute.DeleteTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ) + + +def test_delete_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetSslProxyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value 
= response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetSslProxyRequest.pb(compute.DeleteTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ) + + +def test_delete_unary_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetTargetSslProxyRequest, + dict, +]) +def test_get_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetSslProxy( + certificate_map='certificate_map_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + proxy_header='proxy_header_value', + self_link='self_link_value', + service='service_value', + ssl_certificates=['ssl_certificates_value'], + ssl_policy='ssl_policy_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetSslProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetSslProxy) + assert response.certificate_map == 'certificate_map_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.proxy_header == 'proxy_header_value' + assert response.self_link == 'self_link_value' + assert response.service == 'service_value' + assert response.ssl_certificates == ['ssl_certificates_value'] + assert response.ssl_policy == 'ssl_policy_value' + + +def test_get_rest_required_fields(request_type=compute.GetTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetSslProxy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetSslProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = 
TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetTargetSslProxyRequest.pb(compute.GetTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetSslProxy.to_json(compute.TargetSslProxy()) + + request = compute.GetTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetSslProxy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetSslProxy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetSslProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ) + + +def test_get_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetSslProxyRequest, + dict, +]) +def test_insert_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_ssl_proxy_resource"] = {'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_header': 'proxy_header_value', 'self_link': 'self_link_value', 'service': 'service_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetSslProxyRequest.pb(compute.InsertTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_ssl_proxy_resource"] = {'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_header': 'proxy_header_value', 'self_link': 'self_link_value', 'service': 'service_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy_resource=compute.TargetSslProxy(certificate_map='certificate_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy_resource=compute.TargetSslProxy(certificate_map='certificate_map_value'), + ) + + +def test_insert_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetSslProxyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_ssl_proxy_resource"] = {'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_header': 'proxy_header_value', 'self_link': 'self_link_value', 'service': 'service_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertTargetSslProxyRequest.pb(compute.InsertTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_ssl_proxy_resource"] = {'certificate_map': 'certificate_map_value', 'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_header': 'proxy_header_value', 'self_link': 'self_link_value', 'service': 'service_value', 'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2'], 'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy_resource=compute.TargetSslProxy(certificate_map='certificate_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy_resource=compute.TargetSslProxy(certificate_map='certificate_map_value'), + ) + + +def test_insert_unary_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListTargetSslProxiesRequest, + dict, +]) +def test_list_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetSslProxyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetSslProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListTargetSslProxiesRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetSslProxyList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetSslProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_list") 
as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListTargetSslProxiesRequest.pb(compute.ListTargetSslProxiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetSslProxyList.to_json(compute.TargetSslProxyList()) + + request = compute.ListTargetSslProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetSslProxyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListTargetSslProxiesRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetSslProxyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetSslProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListTargetSslProxiesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetSslProxyList( + items=[ + compute.TargetSslProxy(), + compute.TargetSslProxy(), + compute.TargetSslProxy(), + ], + next_page_token='abc', + ), + compute.TargetSslProxyList( + items=[], + next_page_token='def', + ), + compute.TargetSslProxyList( + items=[ + compute.TargetSslProxy(), + ], + next_page_token='ghi', + ), + compute.TargetSslProxyList( + items=[ + compute.TargetSslProxy(), + compute.TargetSslProxy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetSslProxyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetSslProxy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetBackendServiceTargetSslProxyRequest, + dict, +]) +def test_set_backend_service_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_backend_service_request_resource"] = {'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_backend_service(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_backend_service_rest_required_fields(request_type=compute.SetBackendServiceTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backend_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backend_service._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_backend_service(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_backend_service_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_backend_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxiesSetBackendServiceRequestResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_backend_service_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_backend_service") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_backend_service") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetBackendServiceTargetSslProxyRequest.pb(compute.SetBackendServiceTargetSslProxyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetBackendServiceTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_backend_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_backend_service_rest_bad_request(transport: str = 'rest', request_type=compute.SetBackendServiceTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_backend_service_request_resource"] = {'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_backend_service(request) + + +def test_set_backend_service_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_backend_service_request_resource=compute.TargetSslProxiesSetBackendServiceRequest(service='service_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_backend_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setBackendService" % client.transport._host, args[1]) + + +def test_set_backend_service_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_backend_service( + compute.SetBackendServiceTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_backend_service_request_resource=compute.TargetSslProxiesSetBackendServiceRequest(service='service_value'), + ) + + +def test_set_backend_service_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetBackendServiceTargetSslProxyRequest, + dict, +]) +def test_set_backend_service_unary_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_backend_service_request_resource"] = {'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_backend_service_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_backend_service_unary_rest_required_fields(request_type=compute.SetBackendServiceTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backend_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backend_service._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_backend_service_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_backend_service_unary_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_backend_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxiesSetBackendServiceRequestResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_backend_service_unary_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_backend_service") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_backend_service") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetBackendServiceTargetSslProxyRequest.pb(compute.SetBackendServiceTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetBackendServiceTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_backend_service_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_backend_service_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetBackendServiceTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_backend_service_request_resource"] = {'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_backend_service_unary(request) + + +def test_set_backend_service_unary_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_backend_service_request_resource=compute.TargetSslProxiesSetBackendServiceRequest(service='service_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_backend_service_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setBackendService" % client.transport._host, args[1]) + + +def test_set_backend_service_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_backend_service_unary( + compute.SetBackendServiceTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_backend_service_request_resource=compute.TargetSslProxiesSetBackendServiceRequest(service='service_value'), + ) + + +def test_set_backend_service_unary_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetCertificateMapTargetSslProxyRequest, + dict, +]) +def test_set_certificate_map_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_certificate_map_request_resource"] = {'certificate_map': 'certificate_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_certificate_map(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_certificate_map_rest_required_fields(request_type=compute.SetCertificateMapTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_certificate_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_certificate_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_certificate_map(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_certificate_map_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_certificate_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxiesSetCertificateMapRequestResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_certificate_map_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_certificate_map") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_certificate_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetCertificateMapTargetSslProxyRequest.pb(compute.SetCertificateMapTargetSslProxyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetCertificateMapTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_certificate_map(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_certificate_map_rest_bad_request(transport: str = 'rest', request_type=compute.SetCertificateMapTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_certificate_map_request_resource"] = {'certificate_map': 'certificate_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_certificate_map(request) + + +def test_set_certificate_map_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_certificate_map_request_resource=compute.TargetSslProxiesSetCertificateMapRequest(certificate_map='certificate_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_certificate_map(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setCertificateMap" % client.transport._host, args[1]) + + +def test_set_certificate_map_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_certificate_map( + compute.SetCertificateMapTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_certificate_map_request_resource=compute.TargetSslProxiesSetCertificateMapRequest(certificate_map='certificate_map_value'), + ) + + +def test_set_certificate_map_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetCertificateMapTargetSslProxyRequest, + dict, +]) +def test_set_certificate_map_unary_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_certificate_map_request_resource"] = {'certificate_map': 'certificate_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_certificate_map_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_certificate_map_unary_rest_required_fields(request_type=compute.SetCertificateMapTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_certificate_map._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_certificate_map._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_certificate_map_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_certificate_map_unary_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_certificate_map._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxiesSetCertificateMapRequestResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_certificate_map_unary_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_certificate_map") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_certificate_map") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetCertificateMapTargetSslProxyRequest.pb(compute.SetCertificateMapTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetCertificateMapTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_certificate_map_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_certificate_map_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetCertificateMapTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_certificate_map_request_resource"] = {'certificate_map': 'certificate_map_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_certificate_map_unary(request) + + +def test_set_certificate_map_unary_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_certificate_map_request_resource=compute.TargetSslProxiesSetCertificateMapRequest(certificate_map='certificate_map_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_certificate_map_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setCertificateMap" % client.transport._host, args[1]) + + +def test_set_certificate_map_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_certificate_map_unary( + compute.SetCertificateMapTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_certificate_map_request_resource=compute.TargetSslProxiesSetCertificateMapRequest(certificate_map='certificate_map_value'), + ) + + +def test_set_certificate_map_unary_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetProxyHeaderTargetSslProxyRequest, + dict, +]) +def test_set_proxy_header_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_proxy_header_request_resource"] = {'proxy_header': 'proxy_header_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_proxy_header(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_proxy_header_rest_required_fields(request_type=compute.SetProxyHeaderTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_proxy_header._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_proxy_header._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_proxy_header(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_proxy_header_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_proxy_header._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxiesSetProxyHeaderRequestResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_proxy_header_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_proxy_header") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_proxy_header") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetProxyHeaderTargetSslProxyRequest.pb(compute.SetProxyHeaderTargetSslProxyRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetProxyHeaderTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_proxy_header(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_proxy_header_rest_bad_request(transport: str = 'rest', request_type=compute.SetProxyHeaderTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_proxy_header_request_resource"] = {'proxy_header': 'proxy_header_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_proxy_header(request) + + +def test_set_proxy_header_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_proxy_header_request_resource=compute.TargetSslProxiesSetProxyHeaderRequest(proxy_header='proxy_header_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_proxy_header(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setProxyHeader" % client.transport._host, args[1]) + + +def test_set_proxy_header_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_proxy_header( + compute.SetProxyHeaderTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_proxy_header_request_resource=compute.TargetSslProxiesSetProxyHeaderRequest(proxy_header='proxy_header_value'), + ) + + +def test_set_proxy_header_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetProxyHeaderTargetSslProxyRequest, + dict, +]) +def test_set_proxy_header_unary_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_proxy_header_request_resource"] = {'proxy_header': 'proxy_header_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_proxy_header_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_proxy_header_unary_rest_required_fields(request_type=compute.SetProxyHeaderTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_proxy_header._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_proxy_header._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_proxy_header_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_proxy_header_unary_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_proxy_header._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxiesSetProxyHeaderRequestResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_proxy_header_unary_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_proxy_header") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_proxy_header") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetProxyHeaderTargetSslProxyRequest.pb(compute.SetProxyHeaderTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetProxyHeaderTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_proxy_header_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_proxy_header_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetProxyHeaderTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_proxy_header_request_resource"] = {'proxy_header': 'proxy_header_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_proxy_header_unary(request) + + +def test_set_proxy_header_unary_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_proxy_header_request_resource=compute.TargetSslProxiesSetProxyHeaderRequest(proxy_header='proxy_header_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_proxy_header_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setProxyHeader" % client.transport._host, args[1]) + + +def test_set_proxy_header_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_proxy_header_unary( + compute.SetProxyHeaderTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_proxy_header_request_resource=compute.TargetSslProxiesSetProxyHeaderRequest(proxy_header='proxy_header_value'), + ) + + +def test_set_proxy_header_unary_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslCertificatesTargetSslProxyRequest, + dict, +]) +def test_set_ssl_certificates_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_certificates(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_ssl_certificates_rest_required_fields(request_type=compute.SetSslCertificatesTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_certificates(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_certificates_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_certificates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxiesSetSslCertificatesRequestResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_certificates_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_ssl_certificates") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_ssl_certificates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSslCertificatesTargetSslProxyRequest.pb(compute.SetSslCertificatesTargetSslProxyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslCertificatesTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_certificates(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_certificates_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslCertificatesTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates(request) + + +def test_set_ssl_certificates_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_ssl_certificates_request_resource=compute.TargetSslProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_certificates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslCertificates" % client.transport._host, args[1]) + + +def test_set_ssl_certificates_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_ssl_certificates( + compute.SetSslCertificatesTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_ssl_certificates_request_resource=compute.TargetSslProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + + +def test_set_ssl_certificates_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslCertificatesTargetSslProxyRequest, + dict, +]) +def test_set_ssl_certificates_unary_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_certificates_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_ssl_certificates_unary_rest_required_fields(request_type=compute.SetSslCertificatesTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_certificates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_certificates_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_certificates_unary_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_certificates._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetSslProxiesSetSslCertificatesRequestResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_certificates_unary_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_ssl_certificates") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_ssl_certificates") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSslCertificatesTargetSslProxyRequest.pb(compute.SetSslCertificatesTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslCertificatesTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_certificates_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_certificates_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslCertificatesTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["target_ssl_proxies_set_ssl_certificates_request_resource"] = {'ssl_certificates': ['ssl_certificates_value1', 'ssl_certificates_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates_unary(request) + + +def test_set_ssl_certificates_unary_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_ssl_certificates_request_resource=compute.TargetSslProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_certificates_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslCertificates" % client.transport._host, args[1]) + + +def test_set_ssl_certificates_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_ssl_certificates_unary( + compute.SetSslCertificatesTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + target_ssl_proxies_set_ssl_certificates_request_resource=compute.TargetSslProxiesSetSslCertificatesRequest(ssl_certificates=['ssl_certificates_value']), + ) + + +def test_set_ssl_certificates_unary_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslPolicyTargetSslProxyRequest, + dict, +]) +def test_set_ssl_policy_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["ssl_policy_reference_resource"] = {'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_ssl_policy_rest_required_fields(request_type=compute.SetSslPolicyTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_policy_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicyReferenceResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_policy_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_ssl_policy") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_ssl_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSslPolicyTargetSslProxyRequest.pb(compute.SetSslPolicyTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslPolicyTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_policy_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslPolicyTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["ssl_policy_reference_resource"] = {'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_policy(request) + + +def test_set_ssl_policy_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ssl_policy_reference_resource=compute.SslPolicyReference(ssl_policy='ssl_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslPolicy" % client.transport._host, args[1]) + + +def test_set_ssl_policy_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_ssl_policy( + compute.SetSslPolicyTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ssl_policy_reference_resource=compute.SslPolicyReference(ssl_policy='ssl_policy_value'), + ) + + +def test_set_ssl_policy_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetSslPolicyTargetSslProxyRequest, + dict, +]) +def test_set_ssl_policy_unary_rest(request_type): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["ssl_policy_reference_resource"] = {'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_ssl_policy_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_ssl_policy_unary_rest_required_fields(request_type=compute.SetSslPolicyTargetSslProxyRequest): + transport_class = transports.TargetSslProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_ssl_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetSslProxy"] = 'target_ssl_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_ssl_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetSslProxy" in jsonified_request + assert jsonified_request["targetSslProxy"] == 'target_ssl_proxy_value' + + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_ssl_policy_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_ssl_policy_unary_rest_unset_required_fields(): + transport = transports.TargetSslProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_ssl_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "sslPolicyReferenceResource", "targetSslProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_ssl_policy_unary_rest_interceptors(null_interceptor): + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetSslProxiesRestInterceptor(), + ) + client = TargetSslProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, 
\ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "post_set_ssl_policy") as post, \ + mock.patch.object(transports.TargetSslProxiesRestInterceptor, "pre_set_ssl_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetSslPolicyTargetSslProxyRequest.pb(compute.SetSslPolicyTargetSslProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetSslPolicyTargetSslProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_ssl_policy_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_ssl_policy_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetSslPolicyTargetSslProxyRequest): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + request_init["ssl_policy_reference_resource"] = {'ssl_policy': 'ssl_policy_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_policy_unary(request) + + +def test_set_ssl_policy_unary_rest_flattened(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_ssl_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ssl_policy_reference_resource=compute.SslPolicyReference(ssl_policy='ssl_policy_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_ssl_policy_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslPolicy" % client.transport._host, args[1]) + + +def test_set_ssl_policy_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_ssl_policy_unary( + compute.SetSslPolicyTargetSslProxyRequest(), + project='project_value', + target_ssl_proxy='target_ssl_proxy_value', + ssl_policy_reference_resource=compute.SslPolicyReference(ssl_policy='ssl_policy_value'), + ) + + +def test_set_ssl_policy_unary_rest_error(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetSslProxiesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetSslProxiesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetSslProxiesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetSslProxiesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TargetSslProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TargetSslProxiesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.TargetSslProxiesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = TargetSslProxiesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_target_ssl_proxies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TargetSslProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_target_ssl_proxies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.target_ssl_proxies.transports.TargetSslProxiesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TargetSslProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete', + 'get', + 'insert', + 'list', + 'set_backend_service', + 'set_certificate_map', + 'set_proxy_header', + 'set_ssl_certificates', + 'set_ssl_policy', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_target_ssl_proxies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.target_ssl_proxies.transports.TargetSslProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetSslProxiesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_target_ssl_proxies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.target_ssl_proxies.transports.TargetSslProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetSslProxiesTransport() + adc.assert_called_once() + + +def test_target_ssl_proxies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TargetSslProxiesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_target_ssl_proxies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.TargetSslProxiesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_ssl_proxies_host_no_port(transport_name): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_ssl_proxies_host_with_port(transport_name): + client = TargetSslProxiesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_ssl_proxies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TargetSslProxiesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TargetSslProxiesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_backend_service._session + session2 = client2.transport.set_backend_service._session + assert session1 != session2 + session1 = client1.transport.set_certificate_map._session + session2 = client2.transport.set_certificate_map._session + assert session1 != session2 + session1 = client1.transport.set_proxy_header._session + session2 = client2.transport.set_proxy_header._session + assert session1 != session2 + session1 = client1.transport.set_ssl_certificates._session + session2 = client2.transport.set_ssl_certificates._session + assert session1 != session2 + session1 = client1.transport.set_ssl_policy._session + session2 = client2.transport.set_ssl_policy._session + assert session1 != session2 + +def 
test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TargetSslProxiesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TargetSslProxiesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TargetSslProxiesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TargetSslProxiesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TargetSslProxiesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TargetSslProxiesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TargetSslProxiesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TargetSslProxiesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetSslProxiesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TargetSslProxiesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TargetSslProxiesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TargetSslProxiesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TargetSslProxiesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TargetSslProxiesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetSslProxiesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TargetSslProxiesTransport, '_prep_wrapped_messages') as prep: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TargetSslProxiesTransport, '_prep_wrapped_messages') as prep: + transport_class = TargetSslProxiesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TargetSslProxiesClient, transports.TargetSslProxiesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py new file mode 100644 index 000000000..b0cab52f5 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py @@ -0,0 +1,3900 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.target_tcp_proxies import TargetTcpProxiesClient +from google.cloud.compute_v1.services.target_tcp_proxies import pagers +from google.cloud.compute_v1.services.target_tcp_proxies import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TargetTcpProxiesClient._get_default_mtls_endpoint(None) is None + assert TargetTcpProxiesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TargetTcpProxiesClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TargetTcpProxiesClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TargetTcpProxiesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TargetTcpProxiesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetTcpProxiesClient, "rest"), +]) +def test_target_tcp_proxies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TargetTcpProxiesRestTransport, "rest"), +]) +def test_target_tcp_proxies_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetTcpProxiesClient, "rest"), +]) +def test_target_tcp_proxies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_target_tcp_proxies_client_get_transport_class(): + transport = TargetTcpProxiesClient.get_transport_class() + available_transports = [ + transports.TargetTcpProxiesRestTransport, + ] + assert transport in available_transports + + transport = TargetTcpProxiesClient.get_transport_class("rest") + assert transport == transports.TargetTcpProxiesRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetTcpProxiesClient, transports.TargetTcpProxiesRestTransport, "rest"), +]) 
+@mock.patch.object(TargetTcpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetTcpProxiesClient)) +def test_target_tcp_proxies_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TargetTcpProxiesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TargetTcpProxiesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TargetTcpProxiesClient, transports.TargetTcpProxiesRestTransport, "rest", "true"), + (TargetTcpProxiesClient, transports.TargetTcpProxiesRestTransport, "rest", "false"), +]) +@mock.patch.object(TargetTcpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetTcpProxiesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_target_tcp_proxies_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TargetTcpProxiesClient +]) +@mock.patch.object(TargetTcpProxiesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetTcpProxiesClient)) +def test_target_tcp_proxies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetTcpProxiesClient, transports.TargetTcpProxiesRestTransport, "rest"), +]) +def test_target_tcp_proxies_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TargetTcpProxiesClient, transports.TargetTcpProxiesRestTransport, "rest", None), +]) +def test_target_tcp_proxies_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListTargetTcpProxiesRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxyAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListTargetTcpProxiesRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxyAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(), + ) + client = TargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListTargetTcpProxiesRequest.pb(compute.AggregatedListTargetTcpProxiesRequest()) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetTcpProxyAggregatedList.to_json(compute.TargetTcpProxyAggregatedList()) + + request = compute.AggregatedListTargetTcpProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetTcpProxyAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListTargetTcpProxiesRequest): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetTcpProxyAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/targetTcpProxies" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListTargetTcpProxiesRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetTcpProxyAggregatedList( + items={ + 'a':compute.TargetTcpProxiesScopedList(), + 'b':compute.TargetTcpProxiesScopedList(), + 'c':compute.TargetTcpProxiesScopedList(), + }, + next_page_token='abc', + ), + compute.TargetTcpProxyAggregatedList( + items={}, + next_page_token='def', + ), + compute.TargetTcpProxyAggregatedList( + items={ + 'g':compute.TargetTcpProxiesScopedList(), + }, + next_page_token='ghi', + ), + compute.TargetTcpProxyAggregatedList( + items={ + 'h':compute.TargetTcpProxiesScopedList(), + 'i':compute.TargetTcpProxiesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetTcpProxyAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.TargetTcpProxiesScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.TargetTcpProxiesScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.TargetTcpProxiesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetTcpProxyRequest, + dict, +]) +def 
test_delete_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetTcpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(), + ) + client = TargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetTcpProxyRequest.pb(compute.DeleteTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetTcpProxyRequest): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + + +def test_delete_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetTcpProxyRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value 
= response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetTcpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(), + ) + client = TargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_delete") as post, \ + 
mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetTcpProxyRequest.pb(compute.DeleteTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetTcpProxyRequest): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + + +def test_delete_unary_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetTargetTcpProxyRequest, + dict, +]) +def test_get_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxy( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + proxy_bind=True, + proxy_header='proxy_header_value', + region='region_value', + self_link='self_link_value', + service='service_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetTcpProxy) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.proxy_bind is True + assert response.proxy_header == 'proxy_header_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.service == 'service_value' + + +def test_get_rest_required_fields(request_type=compute.GetTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "targetTcpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(), + ) + client = TargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, 
\ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetTargetTcpProxyRequest.pb(compute.GetTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetTcpProxy.to_json(compute.TargetTcpProxy()) + + request = compute.GetTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetTcpProxy() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetTargetTcpProxyRequest): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxy() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + ) + + +def test_get_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetTcpProxyRequest, + dict, +]) +def test_insert_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_tcp_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'proxy_header': 'proxy_header_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetTcpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(), + ) + client = TargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetTcpProxyRequest.pb(compute.InsertTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetTcpProxyRequest): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_tcp_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'proxy_header': 'proxy_header_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy_resource=compute.TargetTcpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy_resource=compute.TargetTcpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetTcpProxyRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_tcp_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'proxy_header': 'proxy_header_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetTcpProxyResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(), + ) + client = TargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertTargetTcpProxyRequest.pb(compute.InsertTargetTcpProxyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetTcpProxyRequest): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["target_tcp_proxy_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'proxy_bind': True, 'proxy_header': 'proxy_header_value', 'region': 'region_value', 'self_link': 'self_link_value', 'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy_resource=compute.TargetTcpProxy(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy_resource=compute.TargetTcpProxy(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListTargetTcpProxiesRequest, + dict, +]) +def test_list_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxyList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListTargetTcpProxiesRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxyList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(), + ) + client = TargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_list") 
as post, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListTargetTcpProxiesRequest.pb(compute.ListTargetTcpProxiesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetTcpProxyList.to_json(compute.TargetTcpProxyList()) + + request = compute.ListTargetTcpProxiesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetTcpProxyList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListTargetTcpProxiesRequest): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetTcpProxyList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetTcpProxyList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListTargetTcpProxiesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetTcpProxyList( + items=[ + compute.TargetTcpProxy(), + compute.TargetTcpProxy(), + compute.TargetTcpProxy(), + ], + next_page_token='abc', + ), + compute.TargetTcpProxyList( + items=[], + next_page_token='def', + ), + compute.TargetTcpProxyList( + items=[ + compute.TargetTcpProxy(), + ], + next_page_token='ghi', + ), + compute.TargetTcpProxyList( + items=[ + compute.TargetTcpProxy(), + compute.TargetTcpProxy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetTcpProxyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetTcpProxy) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetBackendServiceTargetTcpProxyRequest, + dict, +]) +def test_set_backend_service_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request_init["target_tcp_proxies_set_backend_service_request_resource"] = {'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_backend_service(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_backend_service_rest_required_fields(request_type=compute.SetBackendServiceTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backend_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backend_service._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_backend_service(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_backend_service_rest_unset_required_fields(): + transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_backend_service._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetTcpProxiesSetBackendServiceRequestResource", "targetTcpProxy", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_backend_service_rest_interceptors(null_interceptor): + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(), + ) + client = TargetTcpProxiesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_set_backend_service") as post, \ + mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_set_backend_service") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetBackendServiceTargetTcpProxyRequest.pb(compute.SetBackendServiceTargetTcpProxyRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetBackendServiceTargetTcpProxyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_backend_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_backend_service_rest_bad_request(transport: str = 'rest', request_type=compute.SetBackendServiceTargetTcpProxyRequest): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request_init["target_tcp_proxies_set_backend_service_request_resource"] = {'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_backend_service(request) + + +def test_set_backend_service_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + target_tcp_proxies_set_backend_service_request_resource=compute.TargetTcpProxiesSetBackendServiceRequest(service='service_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_backend_service(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setBackendService" % client.transport._host, args[1]) + + +def test_set_backend_service_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_backend_service( + compute.SetBackendServiceTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + target_tcp_proxies_set_backend_service_request_resource=compute.TargetTcpProxiesSetBackendServiceRequest(service='service_value'), + ) + + +def test_set_backend_service_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetBackendServiceTargetTcpProxyRequest, + dict, +]) +def test_set_backend_service_unary_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request_init["target_tcp_proxies_set_backend_service_request_resource"] = {'service': 'service_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_backend_service_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_backend_service_unary_rest_required_fields(request_type=compute.SetBackendServiceTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backend_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_backend_service._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_backend_service_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_backend_service_unary_rest_unset_required_fields():
+    # Fix: AnonymousCredentials must be instantiated, not passed as a class.
+    transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_backend_service._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetTcpProxiesSetBackendServiceRequestResource", "targetTcpProxy", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_backend_service_unary_rest_interceptors(null_interceptor):
+    transport = transports.TargetTcpProxiesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(),
+        )
+    client = TargetTcpProxiesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_set_backend_service") as post, \
+        mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_set_backend_service") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetBackendServiceTargetTcpProxyRequest.pb(compute.SetBackendServiceTargetTcpProxyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetBackendServiceTargetTcpProxyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_backend_service_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_backend_service_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetBackendServiceTargetTcpProxyRequest):
+    client = TargetTcpProxiesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'}
+    request_init["target_tcp_proxies_set_backend_service_request_resource"] = {'service': 'service_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_backend_service_unary(request) + + +def test_set_backend_service_unary_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + target_tcp_proxies_set_backend_service_request_resource=compute.TargetTcpProxiesSetBackendServiceRequest(service='service_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_backend_service_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setBackendService" % client.transport._host, args[1]) + + +def test_set_backend_service_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_backend_service_unary( + compute.SetBackendServiceTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + target_tcp_proxies_set_backend_service_request_resource=compute.TargetTcpProxiesSetBackendServiceRequest(service='service_value'), + ) + + +def test_set_backend_service_unary_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetProxyHeaderTargetTcpProxyRequest, + dict, +]) +def test_set_proxy_header_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request_init["target_tcp_proxies_set_proxy_header_request_resource"] = {'proxy_header': 'proxy_header_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_proxy_header(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_proxy_header_rest_required_fields(request_type=compute.SetProxyHeaderTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_proxy_header._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_proxy_header._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_proxy_header(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_proxy_header_rest_unset_required_fields():
+    # Fix: AnonymousCredentials must be instantiated, not passed as a class.
+    transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_proxy_header._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetTcpProxiesSetProxyHeaderRequestResource", "targetTcpProxy", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_proxy_header_rest_interceptors(null_interceptor):
+    transport = transports.TargetTcpProxiesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(),
+        )
+    client = TargetTcpProxiesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_set_proxy_header") as post, \
+        mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_set_proxy_header") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetProxyHeaderTargetTcpProxyRequest.pb(compute.SetProxyHeaderTargetTcpProxyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetProxyHeaderTargetTcpProxyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_proxy_header(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_proxy_header_rest_bad_request(transport: str = 'rest', request_type=compute.SetProxyHeaderTargetTcpProxyRequest):
+    client = TargetTcpProxiesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'}
+    request_init["target_tcp_proxies_set_proxy_header_request_resource"] = {'proxy_header': 'proxy_header_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_proxy_header(request)
+
+
+def test_set_proxy_header_rest_flattened():
+    client = TargetTcpProxiesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + target_tcp_proxies_set_proxy_header_request_resource=compute.TargetTcpProxiesSetProxyHeaderRequest(proxy_header='proxy_header_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_proxy_header(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setProxyHeader" % client.transport._host, args[1]) + + +def test_set_proxy_header_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_proxy_header( + compute.SetProxyHeaderTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + target_tcp_proxies_set_proxy_header_request_resource=compute.TargetTcpProxiesSetProxyHeaderRequest(proxy_header='proxy_header_value'), + ) + + +def test_set_proxy_header_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetProxyHeaderTargetTcpProxyRequest, + dict, +]) +def test_set_proxy_header_unary_rest(request_type): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + request_init["target_tcp_proxies_set_proxy_header_request_resource"] = {'proxy_header': 'proxy_header_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_proxy_header_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_proxy_header_unary_rest_required_fields(request_type=compute.SetProxyHeaderTargetTcpProxyRequest): + transport_class = transports.TargetTcpProxiesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["target_tcp_proxy"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_proxy_header._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["targetTcpProxy"] = 'target_tcp_proxy_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_proxy_header._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "targetTcpProxy" in jsonified_request + assert jsonified_request["targetTcpProxy"] == 'target_tcp_proxy_value' + + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = compute.Operation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_proxy_header_unary(request)
+
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_proxy_header_unary_rest_unset_required_fields():
+    # Fix: AnonymousCredentials must be instantiated, not passed as a class.
+    transport = transports.TargetTcpProxiesRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.set_proxy_header._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("requestId", )) & set(("project", "targetTcpProxiesSetProxyHeaderRequestResource", "targetTcpProxy", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_proxy_header_unary_rest_interceptors(null_interceptor):
+    transport = transports.TargetTcpProxiesRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.TargetTcpProxiesRestInterceptor(),
+        )
+    client = TargetTcpProxiesClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "post_set_proxy_header") as post, \
+        mock.patch.object(transports.TargetTcpProxiesRestInterceptor, "pre_set_proxy_header") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = compute.SetProxyHeaderTargetTcpProxyRequest.pb(compute.SetProxyHeaderTargetTcpProxyRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = compute.Operation.to_json(compute.Operation())
+
+        request = compute.SetProxyHeaderTargetTcpProxyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = compute.Operation()
+
+        client.set_proxy_header_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_proxy_header_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetProxyHeaderTargetTcpProxyRequest):
+    client = TargetTcpProxiesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project': 'sample1', 'target_tcp_proxy': 'sample2'}
+    request_init["target_tcp_proxies_set_proxy_header_request_resource"] = {'proxy_header': 'proxy_header_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_proxy_header_unary(request) + + +def test_set_proxy_header_unary_rest_flattened(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'target_tcp_proxy': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + target_tcp_proxies_set_proxy_header_request_resource=compute.TargetTcpProxiesSetProxyHeaderRequest(proxy_header='proxy_header_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_proxy_header_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setProxyHeader" % client.transport._host, args[1]) + + +def test_set_proxy_header_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_proxy_header_unary( + compute.SetProxyHeaderTargetTcpProxyRequest(), + project='project_value', + target_tcp_proxy='target_tcp_proxy_value', + target_tcp_proxies_set_proxy_header_request_resource=compute.TargetTcpProxiesSetProxyHeaderRequest(proxy_header='proxy_header_value'), + ) + + +def test_set_proxy_header_unary_rest_error(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetTcpProxiesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetTcpProxiesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetTcpProxiesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetTcpProxiesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TargetTcpProxiesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TargetTcpProxiesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.TargetTcpProxiesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = TargetTcpProxiesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_target_tcp_proxies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TargetTcpProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_target_tcp_proxies_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.target_tcp_proxies.transports.TargetTcpProxiesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TargetTcpProxiesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'set_backend_service', + 'set_proxy_header', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_target_tcp_proxies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.target_tcp_proxies.transports.TargetTcpProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetTcpProxiesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_target_tcp_proxies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.target_tcp_proxies.transports.TargetTcpProxiesTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetTcpProxiesTransport() + adc.assert_called_once() + + +def test_target_tcp_proxies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TargetTcpProxiesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_target_tcp_proxies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.TargetTcpProxiesRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_tcp_proxies_host_no_port(transport_name): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_tcp_proxies_host_with_port(transport_name): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_tcp_proxies_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TargetTcpProxiesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TargetTcpProxiesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_backend_service._session + session2 = client2.transport.set_backend_service._session + assert session1 != session2 + session1 = client1.transport.set_proxy_header._session + session2 = client2.transport.set_proxy_header._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TargetTcpProxiesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TargetTcpProxiesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetTcpProxiesClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TargetTcpProxiesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TargetTcpProxiesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TargetTcpProxiesClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TargetTcpProxiesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TargetTcpProxiesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TargetTcpProxiesClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TargetTcpProxiesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TargetTcpProxiesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetTcpProxiesClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TargetTcpProxiesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TargetTcpProxiesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TargetTcpProxiesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TargetTcpProxiesTransport, '_prep_wrapped_messages') as prep: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TargetTcpProxiesTransport, '_prep_wrapped_messages') as prep: + transport_class = TargetTcpProxiesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # 
Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TargetTcpProxiesClient, transports.TargetTcpProxiesRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py new file mode 100644 index 000000000..4181044ed --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py @@ -0,0 +1,3396 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
# NOTE(review): json_format is already imported above — harmless duplicate
# produced by the code generator.
from google.protobuf import json_format

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import extended_operation  # type: ignore
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.target_vpn_gateways import TargetVpnGatewaysClient
from google.cloud.compute_v1.services.target_vpn_gateways import pagers
from google.cloud.compute_v1.services.target_vpn_gateways import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    """Return a dummy (cert_bytes, key_bytes) pair used as an mTLS client cert source in tests."""
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TargetVpnGatewaysClient._get_default_mtls_endpoint(None) is None + assert TargetVpnGatewaysClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TargetVpnGatewaysClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TargetVpnGatewaysClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TargetVpnGatewaysClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TargetVpnGatewaysClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetVpnGatewaysClient, "rest"), +]) +def test_target_vpn_gateways_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TargetVpnGatewaysRestTransport, "rest"), +]) +def test_target_vpn_gateways_client_service_account_always_use_jwt(transport_class, transport_name): + with 
mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TargetVpnGatewaysClient, "rest"), +]) +def test_target_vpn_gateways_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_target_vpn_gateways_client_get_transport_class(): + transport = TargetVpnGatewaysClient.get_transport_class() + available_transports = [ + transports.TargetVpnGatewaysRestTransport, + ] + assert transport in available_transports + + transport = TargetVpnGatewaysClient.get_transport_class("rest") + assert transport == transports.TargetVpnGatewaysRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetVpnGatewaysClient, transports.TargetVpnGatewaysRestTransport, 
"rest"), +]) +@mock.patch.object(TargetVpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetVpnGatewaysClient)) +def test_target_vpn_gateways_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TargetVpnGatewaysClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TargetVpnGatewaysClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TargetVpnGatewaysClient, transports.TargetVpnGatewaysRestTransport, "rest", "true"), + (TargetVpnGatewaysClient, transports.TargetVpnGatewaysRestTransport, "rest", "false"), +]) +@mock.patch.object(TargetVpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetVpnGatewaysClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_target_vpn_gateways_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TargetVpnGatewaysClient +]) +@mock.patch.object(TargetVpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TargetVpnGatewaysClient)) +def test_target_vpn_gateways_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TargetVpnGatewaysClient, transports.TargetVpnGatewaysRestTransport, "rest"), +]) +def test_target_vpn_gateways_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TargetVpnGatewaysClient, transports.TargetVpnGatewaysRestTransport, "rest", None), +]) +def test_target_vpn_gateways_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListTargetVpnGatewaysRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetVpnGatewayAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListTargetVpnGatewaysRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetVpnGatewayAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListTargetVpnGatewaysRequest.pb(compute.AggregatedListTargetVpnGatewaysRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetVpnGatewayAggregatedList.to_json(compute.TargetVpnGatewayAggregatedList()) + + request = compute.AggregatedListTargetVpnGatewaysRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetVpnGatewayAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListTargetVpnGatewaysRequest): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetVpnGatewayAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetVpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/targetVpnGateways" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListTargetVpnGatewaysRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetVpnGatewayAggregatedList( + items={ + 'a':compute.TargetVpnGatewaysScopedList(), + 'b':compute.TargetVpnGatewaysScopedList(), + 'c':compute.TargetVpnGatewaysScopedList(), + }, + next_page_token='abc', + ), + compute.TargetVpnGatewayAggregatedList( + items={}, + next_page_token='def', + ), + compute.TargetVpnGatewayAggregatedList( + items={ + 'g':compute.TargetVpnGatewaysScopedList(), + }, + next_page_token='ghi', + ), + compute.TargetVpnGatewayAggregatedList( + items={ + 'h':compute.TargetVpnGatewaysScopedList(), + 'i':compute.TargetVpnGatewaysScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetVpnGatewayAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.TargetVpnGatewaysScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.TargetVpnGatewaysScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.TargetVpnGatewaysScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetVpnGatewayRequest, + 
dict, +]) +def test_delete_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteTargetVpnGatewayRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_vpn_gateway"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetVpnGateway"] = 'target_vpn_gateway_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetVpnGateway" in jsonified_request + assert jsonified_request["targetVpnGateway"] == 'target_vpn_gateway_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetVpnGateway", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetVpnGatewayRequest.pb(compute.DeleteTargetVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetVpnGatewayRequest): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_vpn_gateway='target_vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteTargetVpnGatewayRequest(), + project='project_value', + region='region_value', + target_vpn_gateway='target_vpn_gateway_value', + ) + + +def test_delete_rest_error(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteTargetVpnGatewayRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + 
response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteTargetVpnGatewayRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_vpn_gateway"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetVpnGateway"] = 'target_vpn_gateway_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "targetVpnGateway" in jsonified_request + assert jsonified_request["targetVpnGateway"] == 'target_vpn_gateway_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetVpnGateway", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteTargetVpnGatewayRequest.pb(compute.DeleteTargetVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteTargetVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteTargetVpnGatewayRequest): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_vpn_gateway='target_vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteTargetVpnGatewayRequest(), + project='project_value', + region='region_value', + target_vpn_gateway='target_vpn_gateway_value', + ) + + +def test_delete_unary_rest_error(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetTargetVpnGatewayRequest, + dict, +]) +def test_get_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetVpnGateway( + creation_timestamp='creation_timestamp_value', + description='description_value', + forwarding_rules=['forwarding_rules_value'], + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + name='name_value', + network='network_value', + region='region_value', + self_link='self_link_value', + status='status_value', + tunnels=['tunnels_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TargetVpnGateway) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.forwarding_rules == ['forwarding_rules_value'] + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + assert response.tunnels == ['tunnels_value'] + + +def test_get_rest_required_fields(request_type=compute.GetTargetVpnGatewayRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["target_vpn_gateway"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["targetVpnGateway"] = 'target_vpn_gateway_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert 
jsonified_request["region"] == 'region_value' + assert "targetVpnGateway" in jsonified_request + assert jsonified_request["targetVpnGateway"] == 'target_vpn_gateway_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetVpnGateway() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "targetVpnGateway", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = 
transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetTargetVpnGatewayRequest.pb(compute.GetTargetVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TargetVpnGateway.to_json(compute.TargetVpnGateway()) + + request = compute.GetTargetVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TargetVpnGateway() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetTargetVpnGatewayRequest): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TargetVpnGateway() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'target_vpn_gateway': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_vpn_gateway='target_vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetVpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetTargetVpnGatewayRequest(), + project='project_value', + region='region_value', + target_vpn_gateway='target_vpn_gateway_value', + ) + + +def test_get_rest_error(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetVpnGatewayRequest, + dict, +]) +def test_insert_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'forwarding_rules': ['forwarding_rules_value1', 'forwarding_rules_value2'], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'tunnels': ['tunnels_value1', 'tunnels_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertTargetVpnGatewayRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetVpnGatewayResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetVpnGatewayRequest.pb(compute.InsertTargetVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetVpnGatewayRequest): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'forwarding_rules': ['forwarding_rules_value1', 'forwarding_rules_value2'], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'tunnels': ['tunnels_value1', 'tunnels_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_vpn_gateway_resource=compute.TargetVpnGateway(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert( + compute.InsertTargetVpnGatewayRequest(), + project='project_value', + region='region_value', + target_vpn_gateway_resource=compute.TargetVpnGateway(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertTargetVpnGatewayRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'forwarding_rules': ['forwarding_rules_value1', 'forwarding_rules_value2'], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'tunnels': ['tunnels_value1', 'tunnels_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertTargetVpnGatewayRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "targetVpnGatewayResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertTargetVpnGatewayRequest.pb(compute.InsertTargetVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertTargetVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertTargetVpnGatewayRequest): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["target_vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'forwarding_rules': ['forwarding_rules_value1', 'forwarding_rules_value2'], 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'status': 'status_value', 'tunnels': ['tunnels_value1', 'tunnels_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + target_vpn_gateway_resource=compute.TargetVpnGateway(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertTargetVpnGatewayRequest(), + project='project_value', + region='region_value', + target_vpn_gateway_resource=compute.TargetVpnGateway(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListTargetVpnGatewaysRequest, + dict, +]) +def test_list_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TargetVpnGatewayList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TargetVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListTargetVpnGatewaysRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TargetVpnGatewayList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TargetVpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListTargetVpnGatewaysRequest.pb(compute.ListTargetVpnGatewaysRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.TargetVpnGatewayList.to_json(compute.TargetVpnGatewayList())

        request = compute.ListTargetVpnGatewaysRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.TargetVpnGatewayList()

        client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        # Both interceptor hooks must have been invoked exactly once.
        pre.assert_called_once()
        post.assert_called_once()


def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListTargetVpnGatewaysRequest):
    client = TargetVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list(request)


def test_list_rest_flattened():
    client = TargetVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.TargetVpnGatewayList()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.TargetVpnGatewayList.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways" % client.transport._host, args[1])


def test_list_rest_flattened_error(transport: str = 'rest'):
    client = TargetVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list(
            compute.ListTargetVpnGatewaysRequest(),
            project='project_value',
            region='region_value',
        )


def test_list_rest_pager(transport: str = 'rest'):
    client = TargetVpnGatewaysClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.TargetVpnGatewayList( + items=[ + compute.TargetVpnGateway(), + compute.TargetVpnGateway(), + compute.TargetVpnGateway(), + ], + next_page_token='abc', + ), + compute.TargetVpnGatewayList( + items=[], + next_page_token='def', + ), + compute.TargetVpnGatewayList( + items=[ + compute.TargetVpnGateway(), + ], + next_page_token='ghi', + ), + compute.TargetVpnGatewayList( + items=[ + compute.TargetVpnGateway(), + compute.TargetVpnGateway(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.TargetVpnGatewayList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.TargetVpnGateway) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsTargetVpnGatewayRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsTargetVpnGatewayRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + 
jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsTargetVpnGatewayRequest.pb(compute.SetLabelsTargetVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsTargetVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsTargetVpnGatewayRequest): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsTargetVpnGatewayRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsTargetVpnGatewayRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsTargetVpnGatewayRequest): + transport_class = transports.TargetVpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.TargetVpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.TargetVpnGatewaysRestInterceptor(), + ) + client = TargetVpnGatewaysClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.TargetVpnGatewaysRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsTargetVpnGatewayRequest.pb(compute.SetLabelsTargetVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsTargetVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsTargetVpnGatewayRequest): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsTargetVpnGatewayRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetVpnGatewaysClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetVpnGatewaysClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TargetVpnGatewaysClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TargetVpnGatewaysClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TargetVpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TargetVpnGatewaysClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.TargetVpnGatewaysRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = TargetVpnGatewaysClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_target_vpn_gateways_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TargetVpnGatewaysTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_target_vpn_gateways_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.target_vpn_gateways.transports.TargetVpnGatewaysTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TargetVpnGatewaysTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'list', + 'set_labels', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_target_vpn_gateways_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.target_vpn_gateways.transports.TargetVpnGatewaysTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetVpnGatewaysTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_target_vpn_gateways_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.target_vpn_gateways.transports.TargetVpnGatewaysTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TargetVpnGatewaysTransport() + adc.assert_called_once() + + +def test_target_vpn_gateways_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TargetVpnGatewaysClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_target_vpn_gateways_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.TargetVpnGatewaysRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_vpn_gateways_host_no_port(transport_name): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_vpn_gateways_host_with_port(transport_name): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_target_vpn_gateways_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TargetVpnGatewaysClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TargetVpnGatewaysClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TargetVpnGatewaysClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TargetVpnGatewaysClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetVpnGatewaysClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TargetVpnGatewaysClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TargetVpnGatewaysClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TargetVpnGatewaysClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TargetVpnGatewaysClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TargetVpnGatewaysClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TargetVpnGatewaysClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TargetVpnGatewaysClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TargetVpnGatewaysClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TargetVpnGatewaysClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TargetVpnGatewaysClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TargetVpnGatewaysClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TargetVpnGatewaysClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TargetVpnGatewaysTransport, '_prep_wrapped_messages') as prep: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TargetVpnGatewaysTransport, '_prep_wrapped_messages') as prep: + transport_class = TargetVpnGatewaysClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TargetVpnGatewaysClient, transports.TargetVpnGatewaysRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_url_maps.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_url_maps.py new file mode 100644 index 000000000..f9917bc55 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_url_maps.py @@ -0,0 +1,4699 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.url_maps import UrlMapsClient +from google.cloud.compute_v1.services.url_maps import pagers +from google.cloud.compute_v1.services.url_maps import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert UrlMapsClient._get_default_mtls_endpoint(None) is None + assert UrlMapsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert UrlMapsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert UrlMapsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert UrlMapsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert UrlMapsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (UrlMapsClient, "rest"), +]) +def test_url_maps_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.UrlMapsRestTransport, "rest"), +]) +def test_url_maps_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (UrlMapsClient, "rest"), +]) +def test_url_maps_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_url_maps_client_get_transport_class(): + transport = UrlMapsClient.get_transport_class() + available_transports = [ + transports.UrlMapsRestTransport, + ] + assert transport in available_transports + + transport = UrlMapsClient.get_transport_class("rest") + assert transport == transports.UrlMapsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (UrlMapsClient, transports.UrlMapsRestTransport, "rest"), +]) +@mock.patch.object(UrlMapsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UrlMapsClient)) +def test_url_maps_client_client_options(client_class, transport_class, transport_name): + # Check 
that if channel is provided we won't create a new one. + with mock.patch.object(UrlMapsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(UrlMapsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (UrlMapsClient, transports.UrlMapsRestTransport, "rest", "true"), + (UrlMapsClient, transports.UrlMapsRestTransport, "rest", "false"), +]) +@mock.patch.object(UrlMapsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UrlMapsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_url_maps_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + UrlMapsClient +]) +@mock.patch.object(UrlMapsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(UrlMapsClient)) +def test_url_maps_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (UrlMapsClient, transports.UrlMapsRestTransport, "rest"), +]) +def test_url_maps_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (UrlMapsClient, transports.UrlMapsRestTransport, "rest", None), +]) +def test_url_maps_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListUrlMapsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapsAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapsAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListUrlMapsRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapsAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.UrlMapsAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListUrlMapsRequest.pb(compute.AggregatedListUrlMapsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.UrlMapsAggregatedList.to_json(compute.UrlMapsAggregatedList()) + + request = compute.AggregatedListUrlMapsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.UrlMapsAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListUrlMapsRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.UrlMapsAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapsAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/urlMaps" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListUrlMapsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.UrlMapsAggregatedList( + items={ + 'a':compute.UrlMapsScopedList(), + 'b':compute.UrlMapsScopedList(), + 'c':compute.UrlMapsScopedList(), + }, + next_page_token='abc', + ), + compute.UrlMapsAggregatedList( + items={}, + next_page_token='def', + ), + compute.UrlMapsAggregatedList( + items={ + 'g':compute.UrlMapsScopedList(), + }, + next_page_token='ghi', + ), + compute.UrlMapsAggregatedList( + items={ + 'h':compute.UrlMapsScopedList(), + 'i':compute.UrlMapsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.UrlMapsAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.UrlMapsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.UrlMapsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.UrlMapsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteUrlMapRequest, + dict, +]) +def test_delete_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", 
+ ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["urlMap"] = 
'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteUrlMapRequest.pb(compute.DeleteUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete( + compute.DeleteUrlMapRequest(), + project='project_value', + url_map='url_map_value', + ) + + +def test_delete_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteUrlMapRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, 
"pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteUrlMapRequest.pb(compute.DeleteUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteUrlMapRequest(), + project='project_value', + url_map='url_map_value', + ) + + +def test_delete_unary_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetUrlMapRequest, + dict, +]) +def test_get_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMap( + creation_timestamp='creation_timestamp_value', + default_service='default_service_value', + description='description_value', + fingerprint='fingerprint_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.UrlMap) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.default_service == 'default_service_value' + assert response.description == 'description_value' + assert response.fingerprint == 'fingerprint_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + + +def test_get_rest_required_fields(request_type=compute.GetUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.UrlMap() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_get") as post, \ + 
mock.patch.object(transports.UrlMapsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetUrlMapRequest.pb(compute.GetUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.UrlMap.to_json(compute.UrlMap()) + + request = compute.GetUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.UrlMap() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.UrlMap() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMap.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetUrlMapRequest(), + project='project_value', + url_map='url_map_value', + ) + + +def test_get_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertUrlMapRequest, + dict, +]) +def test_insert_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': 
['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 
'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertUrlMapRequest.pb(compute.InsertUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': 
[{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': 
[{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertUrlMapRequest(), + project='project_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertUrlMapRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 
'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 
'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertUrlMapRequest.pb(compute.InsertUrlMapRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 
'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 
'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertUrlMapRequest(), + project='project_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InvalidateCacheUrlMapRequest, + dict, +]) +def test_invalidate_cache_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["cache_invalidation_rule_resource"] = {'host': 'host_value', 'path': 'path_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.invalidate_cache(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_invalidate_cache_rest_required_fields(request_type=compute.InvalidateCacheUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).invalidate_cache._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).invalidate_cache._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.invalidate_cache(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_invalidate_cache_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.invalidate_cache._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("cacheInvalidationRuleResource", "project", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_invalidate_cache_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_invalidate_cache") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_invalidate_cache") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InvalidateCacheUrlMapRequest.pb(compute.InvalidateCacheUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InvalidateCacheUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.invalidate_cache(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_invalidate_cache_rest_bad_request(transport: str = 'rest', request_type=compute.InvalidateCacheUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["cache_invalidation_rule_resource"] = {'host': 'host_value', 'path': 'path_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.invalidate_cache(request) + + +def test_invalidate_cache_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + cache_invalidation_rule_resource=compute.CacheInvalidationRule(host='host_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.invalidate_cache(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}/invalidateCache" % client.transport._host, args[1]) + + +def test_invalidate_cache_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.invalidate_cache( + compute.InvalidateCacheUrlMapRequest(), + project='project_value', + url_map='url_map_value', + cache_invalidation_rule_resource=compute.CacheInvalidationRule(host='host_value'), + ) + + +def test_invalidate_cache_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InvalidateCacheUrlMapRequest, + dict, +]) +def test_invalidate_cache_unary_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["cache_invalidation_rule_resource"] = {'host': 'host_value', 'path': 'path_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.invalidate_cache_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_invalidate_cache_unary_rest_required_fields(request_type=compute.InvalidateCacheUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).invalidate_cache._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).invalidate_cache._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.invalidate_cache_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_invalidate_cache_unary_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.invalidate_cache._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("cacheInvalidationRuleResource", "project", "urlMap", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_invalidate_cache_unary_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.UrlMapsRestInterceptor, "post_invalidate_cache") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_invalidate_cache") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InvalidateCacheUrlMapRequest.pb(compute.InvalidateCacheUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InvalidateCacheUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.invalidate_cache_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_invalidate_cache_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InvalidateCacheUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["cache_invalidation_rule_resource"] = {'host': 'host_value', 'path': 'path_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.invalidate_cache_unary(request) + + +def test_invalidate_cache_unary_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + cache_invalidation_rule_resource=compute.CacheInvalidationRule(host='host_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.invalidate_cache_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}/invalidateCache" % client.transport._host, args[1]) + + +def test_invalidate_cache_unary_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.invalidate_cache_unary( + compute.InvalidateCacheUrlMapRequest(), + project='project_value', + url_map='url_map_value', + cache_invalidation_rule_resource=compute.CacheInvalidationRule(host='host_value'), + ) + + +def test_invalidate_cache_unary_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListUrlMapsRequest, + dict, +]) +def test_list_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.UrlMapList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListUrlMapsRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.UrlMapList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListUrlMapsRequest.pb(compute.ListUrlMapsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListUrlMapsRequest):
    """An HTTP 400 from the session is surfaced by ``list`` as BadRequest."""
    client = UrlMapsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request that satisfies transcoding so the HTTP layer is reached.
    request = request_type(**{'project': 'sample1'})

    # Fake a 400 reply and expect the mapped core exception.
    with mock.patch.object(Session, 'request') as mocked_request, pytest.raises(core_exceptions.BadRequest):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        mocked_request.return_value = bad_response
        client.list(request)
def test_list_rest_flattened_error(transport: str = 'rest'):
    """Supplying both a request object and flattened fields raises ValueError."""
    client = UrlMapsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mixing the two calling conventions is rejected before any RPC is made.
    request = compute.ListUrlMapsRequest()
    with pytest.raises(ValueError):
        client.list(request, project='project_value')
def test_list_rest_pager(transport: str = 'rest'):
    """Iterating the ``list`` pager yields every item across all pages."""
    client = UrlMapsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    with mock.patch.object(Session, 'request') as mocked_request:
        # Four pages: 3 items / 0 items / 1 item / 2 items.
        pages = (
            compute.UrlMapList(
                items=[compute.UrlMap(), compute.UrlMap(), compute.UrlMap()],
                next_page_token='abc',
            ),
            compute.UrlMapList(items=[], next_page_token='def'),
            compute.UrlMapList(items=[compute.UrlMap()], next_page_token='ghi'),
            compute.UrlMapList(items=[compute.UrlMap(), compute.UrlMap()]),
        )
        # The pager is consumed twice below, so serve the page sequence twice.
        pages = pages + pages

        # Wrap each serialized page in a successful HTTP response.
        mocked_responses = []
        for page in pages:
            fake = Response()
            fake._content = compute.UrlMapList.to_json(page).encode('UTF-8')
            fake.status_code = 200
            mocked_responses.append(fake)
        mocked_request.side_effect = mocked_responses

        sample_request = {'project': 'sample1'}

        # First pass: flattening the pager yields all 6 UrlMap items.
        results = list(client.list(request=sample_request))
        assert len(results) == 6
        assert all(isinstance(item, compute.UrlMap) for item in results)

        # Second pass: page tokens come back in order, ending with "".
        for page_, token in zip(client.list(request=sample_request).pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 
'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
def test_patch_rest_required_fields(request_type=compute.PatchUrlMapRequest):
    """Required-field handling for ``patch``: defaults dropped, required kept.

    Also issues the call through a mocked session/transcode pair and checks
    that no unexpected query params are sent.
    """
    transport_class = transports.UrlMapsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["url_map"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped
    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = 'project_value'
    jsonified_request["urlMap"] = 'url_map_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "urlMap" in jsonified_request
    assert jsonified_request["urlMap"] == 'url_map_value'

    client = UrlMapsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "patch",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.patch(request)

            # No extra query params should have been sent.
            expected_params = []
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_patch_rest_unset_required_fields():
    """``requestId`` is the only optional query param reported for ``patch``."""
    # NOTE(review): instantiate AnonymousCredentials — the original passed the
    # class itself, unlike the two transport_class(...) calls in the test above.
    transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.patch._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "urlMap", "urlMapResource", )))
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request(transport: str = 'rest', request_type=compute.PatchUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': 
{'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 
'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
def test_patch_rest_flattened():
    """Flattened arguments to ``patch`` are expanded onto the expected URL."""
    client = UrlMapsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    with mock.patch.object(type(client.transport._session), 'request') as mocked_request:
        # Canned successful Operation response.
        operation = compute.Operation()
        ok_response = Response()
        ok_response.status_code = 200
        ok_response._content = json_format.MessageToJson(compute.Operation.pb(operation)).encode('UTF-8')
        mocked_request.return_value = ok_response

        # Truthy values for each flattened field, overridden by path params
        # that satisfy the http rule for this method.
        flattened = dict(
            project='project_value',
            url_map='url_map_value',
            url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'),
        )
        flattened.update({'project': 'sample1', 'url_map': 'sample2'})

        client.patch(**flattened)

        # Exactly one HTTP call, aimed at the patch URL for this url_map.
        assert len(mocked_request.mock_calls) == 1
        _, call_args, _ = mocked_request.mock_calls[0]
        assert path_template.validate(
            "%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" % client.transport._host,
            call_args[1],
        )
def test_patch_rest_error():
    """Smoke test: a client with the rest transport can be constructed."""
    UrlMapsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 
'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
def test_patch_unary_rest_required_fields(request_type=compute.PatchUrlMapRequest):
    """Required-field handling for ``patch_unary``: defaults dropped, required kept.

    Also issues the call through a mocked session/transcode pair and checks
    that no unexpected query params are sent.
    """
    transport_class = transports.UrlMapsRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["url_map"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped
    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = 'project_value'
    jsonified_request["urlMap"] = 'url_map_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).patch._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == 'project_value'
    assert "urlMap" in jsonified_request
    assert jsonified_request["urlMap"] == 'url_map_value'

    client = UrlMapsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "patch",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = compute.Operation.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.patch_unary(request)

            # No extra query params should have been sent.
            expected_params = []
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_patch_unary_rest_unset_required_fields():
    """``requestId`` is the only optional query param reported for the patch RPC."""
    # NOTE(review): instantiate AnonymousCredentials — the original passed the
    # class itself, unlike every other construction site in this file.
    transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials())

    # patch_unary shares the underlying ``patch`` transport method.
    unset_fields = transport.patch._get_unset_required_fields({})
    assert set(unset_fields) == (set(("requestId", )) & set(("project", "urlMap", "urlMapResource", )))
mock.patch.object(transports.UrlMapsRestInterceptor, "pre_patch") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchUrlMapRequest.pb(compute.PatchUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request(transport: str = 'rest', request_type=compute.PatchUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 
'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 
'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_patch_unary_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchUrlMapRequest(), + project='project_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_patch_unary_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateUrlMapRequest, + dict, +]) +def test_update_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': 
[{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': 
[{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we 
expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_update_rest_required_fields(request_type=compute.UpdateUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "urlMap", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_update") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateUrlMapRequest.pb(compute.UpdateUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': 
{'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 
'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_update_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateUrlMapRequest(), + project='project_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_update_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.UpdateUrlMapRequest, + dict, +]) +def test_update_unary_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 
'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 
'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_required_fields(request_type=compute.UpdateUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "urlMap", "urlMapResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_update") as post, \ + 
mock.patch.object(transports.UrlMapsRestInterceptor, "pre_update") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateUrlMapRequest.pb(compute.UpdateUrlMapRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request(transport: str = 'rest', request_type=compute.UpdateUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["url_map_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 
'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 
'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" % client.transport._host, args[1]) + + +def test_update_unary_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_unary( + compute.UpdateUrlMapRequest(), + project='project_value', + url_map='url_map_value', + url_map_resource=compute.UrlMap(creation_timestamp='creation_timestamp_value'), + ) + + +def test_update_unary_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ValidateUrlMapRequest, + dict, +]) +def test_validate_rest(request_type): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["url_maps_validate_request_resource"] = {'load_balancing_schemes': ['load_balancing_schemes_value1', 'load_balancing_schemes_value2'], 'resource': {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': {'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 
'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 
'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapsValidateResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.validate(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.UrlMapsValidateResponse) + + +def test_validate_rest_required_fields(request_type=compute.ValidateUrlMapRequest): + transport_class = transports.UrlMapsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["url_map"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).validate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["urlMap"] = 'url_map_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).validate._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "urlMap" in jsonified_request + assert jsonified_request["urlMap"] == 'url_map_value' + + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.UrlMapsValidateResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.validate(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_validate_rest_unset_required_fields(): + transport = transports.UrlMapsRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.validate._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "urlMap", "urlMapsValidateRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_rest_interceptors(null_interceptor): + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.UrlMapsRestInterceptor(), + ) + client = UrlMapsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "post_validate") as post, \ + mock.patch.object(transports.UrlMapsRestInterceptor, "pre_validate") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ValidateUrlMapRequest.pb(compute.ValidateUrlMapRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.UrlMapsValidateResponse.to_json(compute.UrlMapsValidateResponse()) + + request = compute.ValidateUrlMapRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.UrlMapsValidateResponse() + + client.validate(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_validate_rest_bad_request(transport: str = 'rest', request_type=compute.ValidateUrlMapRequest): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'url_map': 'sample2'} + request_init["url_maps_validate_request_resource"] = {'load_balancing_schemes': ['load_balancing_schemes_value1', 'load_balancing_schemes_value2'], 'resource': {'creation_timestamp': 'creation_timestamp_value', 'default_route_action': {'cors_policy': {'allow_credentials': True, 'allow_headers': ['allow_headers_value1', 'allow_headers_value2'], 'allow_methods': ['allow_methods_value1', 'allow_methods_value2'], 'allow_origin_regexes': ['allow_origin_regexes_value1', 'allow_origin_regexes_value2'], 'allow_origins': ['allow_origins_value1', 'allow_origins_value2'], 'disabled': True, 'expose_headers': ['expose_headers_value1', 'expose_headers_value2'], 'max_age': 722}, 'fault_injection_policy': {'abort': {'http_status': 1219, 'percentage': 0.10540000000000001}, 'delay': {'fixed_delay': {'nanos': 543, 'seconds': 751}, 'percentage': 0.10540000000000001}}, 'max_stream_duration': {}, 'request_mirror_policy': {'backend_service': 'backend_service_value'}, 'retry_policy': 
{'num_retries': 1197, 'per_try_timeout': {}, 'retry_conditions': ['retry_conditions_value1', 'retry_conditions_value2']}, 'timeout': {}, 'url_rewrite': {'host_rewrite': 'host_rewrite_value', 'path_prefix_rewrite': 'path_prefix_rewrite_value', 'path_template_rewrite': 'path_template_rewrite_value'}, 'weighted_backend_services': [{'backend_service': 'backend_service_value', 'header_action': {'request_headers_to_add': [{'header_name': 'header_name_value', 'header_value': 'header_value_value', 'replace': True}], 'request_headers_to_remove': ['request_headers_to_remove_value1', 'request_headers_to_remove_value2'], 'response_headers_to_add': {}, 'response_headers_to_remove': ['response_headers_to_remove_value1', 'response_headers_to_remove_value2']}, 'weight': 648}]}, 'default_service': 'default_service_value', 'default_url_redirect': {'host_redirect': 'host_redirect_value', 'https_redirect': True, 'path_redirect': 'path_redirect_value', 'prefix_redirect': 'prefix_redirect_value', 'redirect_response_code': 'redirect_response_code_value', 'strip_query': True}, 'description': 'description_value', 'fingerprint': 'fingerprint_value', 'header_action': {}, 'host_rules': [{'description': 'description_value', 'hosts': ['hosts_value1', 'hosts_value2'], 'path_matcher': 'path_matcher_value'}], 'id': 205, 'kind': 'kind_value', 'name': 'name_value', 'path_matchers': [{'default_route_action': {}, 'default_service': 'default_service_value', 'default_url_redirect': {}, 'description': 'description_value', 'header_action': {}, 'name': 'name_value', 'path_rules': [{'paths': ['paths_value1', 'paths_value2'], 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}], 'route_rules': [{'description': 'description_value', 'header_action': {}, 'match_rules': [{'full_path_match': 'full_path_match_value', 'header_matches': [{'exact_match': 'exact_match_value', 'header_name': 'header_name_value', 'invert_match': True, 'prefix_match': 'prefix_match_value', 'present_match': True, 
'range_match': {'range_end': 931, 'range_start': 1178}, 'regex_match': 'regex_match_value', 'suffix_match': 'suffix_match_value'}], 'ignore_case': True, 'metadata_filters': [{'filter_labels': [{'name': 'name_value', 'value': 'value_value'}], 'filter_match_criteria': 'filter_match_criteria_value'}], 'path_template_match': 'path_template_match_value', 'prefix_match': 'prefix_match_value', 'query_parameter_matches': [{'exact_match': 'exact_match_value', 'name': 'name_value', 'present_match': True, 'regex_match': 'regex_match_value'}], 'regex_match': 'regex_match_value'}], 'priority': 898, 'route_action': {}, 'service': 'service_value', 'url_redirect': {}}]}], 'region': 'region_value', 'self_link': 'self_link_value', 'tests': [{'description': 'description_value', 'expected_output_url': 'expected_output_url_value', 'expected_redirect_response_code': 3275, 'headers': [{'name': 'name_value', 'value': 'value_value'}], 'host': 'host_value', 'path': 'path_value', 'service': 'service_value'}]}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.validate(request) + + +def test_validate_rest_flattened(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.UrlMapsValidateResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'url_map': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + url_map='url_map_value', + url_maps_validate_request_resource=compute.UrlMapsValidateRequest(load_balancing_schemes=['load_balancing_schemes_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.UrlMapsValidateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.validate(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/global/urlMaps/{url_map}/validate" % client.transport._host, args[1]) + + +def test_validate_rest_flattened_error(transport: str = 'rest'): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.validate( + compute.ValidateUrlMapRequest(), + project='project_value', + url_map='url_map_value', + url_maps_validate_request_resource=compute.UrlMapsValidateRequest(load_balancing_schemes=['load_balancing_schemes_value']), + ) + + +def test_validate_rest_error(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UrlMapsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = UrlMapsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = UrlMapsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = UrlMapsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.UrlMapsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = UrlMapsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.UrlMapsRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = UrlMapsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_url_maps_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.UrlMapsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_url_maps_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.url_maps.transports.UrlMapsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.UrlMapsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'insert', + 'invalidate_cache', + 'list', + 'patch', + 'update', + 'validate', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_url_maps_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.url_maps.transports.UrlMapsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UrlMapsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_url_maps_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.url_maps.transports.UrlMapsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.UrlMapsTransport() + adc.assert_called_once() + + +def test_url_maps_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + UrlMapsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_url_maps_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.UrlMapsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_url_maps_host_no_port(transport_name): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_url_maps_host_with_port(transport_name): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_url_maps_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = 
UrlMapsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = UrlMapsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.invalidate_cache._session + session2 = client2.transport.invalidate_cache._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + session1 = client1.transport.validate._session + session2 = client2.transport.validate._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = UrlMapsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = UrlMapsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = UrlMapsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = UrlMapsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = UrlMapsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = UrlMapsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = UrlMapsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = UrlMapsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = UrlMapsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = UrlMapsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = UrlMapsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = UrlMapsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = UrlMapsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = UrlMapsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = UrlMapsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.UrlMapsTransport, '_prep_wrapped_messages') as prep: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.UrlMapsTransport, '_prep_wrapped_messages') as prep: + transport_class = UrlMapsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (UrlMapsClient, transports.UrlMapsRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_vpn_gateways.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_vpn_gateways.py new file mode 100644 index 000000000..da0dc6cad --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_vpn_gateways.py @@ -0,0 +1,3891 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.compute_v1.services.vpn_gateways import VpnGatewaysClient +from google.cloud.compute_v1.services.vpn_gateways import pagers +from google.cloud.compute_v1.services.vpn_gateways import transports +from google.cloud.compute_v1.types import compute +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert VpnGatewaysClient._get_default_mtls_endpoint(None) is None + assert VpnGatewaysClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert VpnGatewaysClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert VpnGatewaysClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert VpnGatewaysClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert VpnGatewaysClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (VpnGatewaysClient, "rest"), +]) +def test_vpn_gateways_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.VpnGatewaysRestTransport, "rest"), +]) +def test_vpn_gateways_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', 
create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (VpnGatewaysClient, "rest"), +]) +def test_vpn_gateways_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_vpn_gateways_client_get_transport_class(): + transport = VpnGatewaysClient.get_transport_class() + available_transports = [ + transports.VpnGatewaysRestTransport, + ] + assert transport in available_transports + + transport = VpnGatewaysClient.get_transport_class("rest") + assert transport == transports.VpnGatewaysRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (VpnGatewaysClient, transports.VpnGatewaysRestTransport, "rest"), +]) +@mock.patch.object(VpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(VpnGatewaysClient)) +def 
test_vpn_gateways_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(VpnGatewaysClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(VpnGatewaysClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (VpnGatewaysClient, transports.VpnGatewaysRestTransport, "rest", "true"), + (VpnGatewaysClient, transports.VpnGatewaysRestTransport, "rest", "false"), +]) +@mock.patch.object(VpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(VpnGatewaysClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_vpn_gateways_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + VpnGatewaysClient +]) +@mock.patch.object(VpnGatewaysClient, "DEFAULT_ENDPOINT", modify_default_endpoint(VpnGatewaysClient)) +def test_vpn_gateways_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (VpnGatewaysClient, transports.VpnGatewaysRestTransport, "rest"), +]) +def test_vpn_gateways_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (VpnGatewaysClient, transports.VpnGatewaysRestTransport, "rest", None), +]) +def test_vpn_gateways_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListVpnGatewaysRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.VpnGatewayAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListVpnGatewaysRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.VpnGatewayAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.VpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListVpnGatewaysRequest.pb(compute.AggregatedListVpnGatewaysRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.VpnGatewayAggregatedList.to_json(compute.VpnGatewayAggregatedList()) + + request = compute.AggregatedListVpnGatewaysRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VpnGatewayAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListVpnGatewaysRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VpnGatewayAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnGatewayAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/vpnGateways" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListVpnGatewaysRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.VpnGatewayAggregatedList( + items={ + 'a':compute.VpnGatewaysScopedList(), + 'b':compute.VpnGatewaysScopedList(), + 'c':compute.VpnGatewaysScopedList(), + }, + next_page_token='abc', + ), + compute.VpnGatewayAggregatedList( + items={}, + next_page_token='def', + ), + compute.VpnGatewayAggregatedList( + items={ + 'g':compute.VpnGatewaysScopedList(), + }, + next_page_token='ghi', + ), + compute.VpnGatewayAggregatedList( + items={ + 'h':compute.VpnGatewaysScopedList(), + 'i':compute.VpnGatewaysScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.VpnGatewayAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.VpnGatewaysScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.VpnGatewaysScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.VpnGatewaysScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteVpnGatewayRequest, + dict, +]) +def test_delete_rest(request_type): + client = VpnGatewaysClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["vpn_gateway"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["vpnGateway"] = 'vpn_gateway_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "vpnGateway" in jsonified_request + assert jsonified_request["vpnGateway"] == 'vpn_gateway_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "vpnGateway", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteVpnGatewayRequest.pb(compute.DeleteVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_gateway='vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteVpnGatewayRequest(), + project='project_value', + region='region_value', + vpn_gateway='vpn_gateway_value', + ) + + +def test_delete_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteVpnGatewayRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["vpn_gateway"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["vpnGateway"] = 'vpn_gateway_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "vpnGateway" in jsonified_request + assert jsonified_request["vpnGateway"] == 'vpn_gateway_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "vpnGateway", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteVpnGatewayRequest.pb(compute.DeleteVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_gateway='vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteVpnGatewayRequest(), + project='project_value', + region='region_value', + vpn_gateway='vpn_gateway_value', + ) + + +def test_delete_unary_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetVpnGatewayRequest, + dict, +]) +def test_get_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.VpnGateway( + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + name='name_value', + network='network_value', + region='region_value', + self_link='self_link_value', + stack_type='stack_type_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.VpnGateway) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.name == 'name_value' + assert response.network == 'network_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.stack_type == 'stack_type_value' + + +def test_get_rest_required_fields(request_type=compute.GetVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["vpn_gateway"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["vpnGateway"] = 'vpn_gateway_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "vpnGateway" in jsonified_request + assert jsonified_request["vpnGateway"] == 
'vpn_gateway_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.VpnGateway() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.VpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "vpnGateway", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + 
client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetVpnGatewayRequest.pb(compute.GetVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.VpnGateway.to_json(compute.VpnGateway()) + + request = compute.GetVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VpnGateway() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.VpnGateway() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_gateway='vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnGateway.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetVpnGatewayRequest(), + project='project_value', + region='region_value', + vpn_gateway='vpn_gateway_value', + ) + + +def test_get_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetStatusVpnGatewayRequest, + dict, +]) +def test_get_status_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.VpnGatewaysGetStatusResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_status(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.VpnGatewaysGetStatusResponse) + + +def test_get_status_rest_required_fields(request_type=compute.GetStatusVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["vpn_gateway"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_status._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["vpnGateway"] = 'vpn_gateway_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_status._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "vpnGateway" in jsonified_request + assert jsonified_request["vpnGateway"] == 'vpn_gateway_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.VpnGatewaysGetStatusResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_status(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_status_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_status._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "vpnGateway", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_status_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_get_status") as post, \ + 
mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_get_status") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetStatusVpnGatewayRequest.pb(compute.GetStatusVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.VpnGatewaysGetStatusResponse.to_json(compute.VpnGatewaysGetStatusResponse()) + + request = compute.GetStatusVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VpnGatewaysGetStatusResponse() + + client.get_status(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_status_rest_bad_request(transport: str = 'rest', request_type=compute.GetStatusVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_status(request) + + +def test_get_status_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.VpnGatewaysGetStatusResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'vpn_gateway': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_gateway='vpn_gateway_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnGatewaysGetStatusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_status(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}/getStatus" % client.transport._host, args[1]) + + +def test_get_status_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_status( + compute.GetStatusVpnGatewayRequest(), + project='project_value', + region='region_value', + vpn_gateway='vpn_gateway_value', + ) + + +def test_get_status_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertVpnGatewayRequest, + dict, +]) +def test_insert_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'vpn_interfaces': [{'id': 205, 'interconnect_attachment': 'interconnect_attachment_value', 'ip_address': 'ip_address_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + 
jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "vpnGatewayResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertVpnGatewayRequest.pb(compute.InsertVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'vpn_interfaces': [{'id': 205, 'interconnect_attachment': 'interconnect_attachment_value', 'ip_address': 'ip_address_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_gateway_resource=compute.VpnGateway(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertVpnGatewayRequest(), + project='project_value', + region='region_value', + vpn_gateway_resource=compute.VpnGateway(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertVpnGatewayRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'vpn_interfaces': [{'id': 205, 'interconnect_attachment': 'interconnect_attachment_value', 'ip_address': 'ip_address_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "vpnGatewayResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, 
"post_insert") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertVpnGatewayRequest.pb(compute.InsertVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["vpn_gateway_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'id': 205, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'name': 'name_value', 'network': 'network_value', 'region': 'region_value', 'self_link': 'self_link_value', 'stack_type': 'stack_type_value', 'vpn_interfaces': [{'id': 205, 'interconnect_attachment': 'interconnect_attachment_value', 'ip_address': 'ip_address_value'}]} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_gateway_resource=compute.VpnGateway(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertVpnGatewayRequest(), + project='project_value', + region='region_value', + vpn_gateway_resource=compute.VpnGateway(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListVpnGatewaysRequest, + dict, +]) +def test_list_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VpnGatewayList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListVpnGatewaysRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.VpnGatewayList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.VpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListVpnGatewaysRequest.pb(compute.ListVpnGatewaysRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.VpnGatewayList.to_json(compute.VpnGatewayList()) + + request = compute.ListVpnGatewaysRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VpnGatewayList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListVpnGatewaysRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VpnGatewayList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnGatewayList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListVpnGatewaysRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.VpnGatewayList( + items=[ + compute.VpnGateway(), + compute.VpnGateway(), + compute.VpnGateway(), + ], + next_page_token='abc', + ), + compute.VpnGatewayList( + items=[], + next_page_token='def', + ), + compute.VpnGatewayList( + items=[ + compute.VpnGateway(), + ], + next_page_token='ghi', + ), + compute.VpnGatewayList( + items=[ + compute.VpnGateway(), + compute.VpnGateway(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.VpnGatewayList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.VpnGateway) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsVpnGatewayRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsVpnGatewayRequest.pb(compute.SetLabelsVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsVpnGatewayRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsVpnGatewayRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsVpnGatewayRequest.pb(compute.SetLabelsVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels_unary(request) + + +def test_set_labels_unary_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsVpnGatewayRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_unary_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.TestIamPermissionsVpnGatewayRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_rest_required_fields(request_type=compute.TestIamPermissionsVpnGatewayRequest): + transport_class = transports.VpnGatewaysRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 
'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.VpnGatewaysRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "resource", 
"testPermissionsRequestResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnGatewaysRestInterceptor(), + ) + client = VpnGatewaysClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.VpnGatewaysRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsVpnGatewayRequest.pb(compute.TestIamPermissionsVpnGatewayRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json(compute.TestPermissionsResponse()) + + request = compute.TestIamPermissionsVpnGatewayRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=compute.TestIamPermissionsVpnGatewayRequest): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 
'sample3'} + request_init["test_permissions_request_resource"] = {'permissions': ['permissions_value1', 'permissions_value2']} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsVpnGatewayRequest(), + project='project_value', + region='region_value', + resource='resource_value', + test_permissions_request_resource=compute.TestPermissionsRequest(permissions=['permissions_value']), + ) + + +def test_test_iam_permissions_rest_error(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VpnGatewaysClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VpnGatewaysClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VpnGatewaysClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VpnGatewaysClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.VpnGatewaysRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = VpnGatewaysClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.VpnGatewaysRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = VpnGatewaysClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_vpn_gateways_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.VpnGatewaysTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_vpn_gateways_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.vpn_gateways.transports.VpnGatewaysTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.VpnGatewaysTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'aggregated_list', + 'delete', + 'get', + 'get_status', + 'insert', + 'list', + 'set_labels', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_vpn_gateways_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.vpn_gateways.transports.VpnGatewaysTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VpnGatewaysTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_vpn_gateways_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.vpn_gateways.transports.VpnGatewaysTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VpnGatewaysTransport() + adc.assert_called_once() + + +def test_vpn_gateways_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + VpnGatewaysClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_vpn_gateways_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.VpnGatewaysRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_vpn_gateways_host_no_port(transport_name): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_vpn_gateways_host_with_port(transport_name): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'compute.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://compute.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_vpn_gateways_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = 
ga_credentials.AnonymousCredentials() + client1 = VpnGatewaysClient( + credentials=creds1, + transport=transport_name, + ) + client2 = VpnGatewaysClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_status._session + session2 = client2.transport.get_status._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = VpnGatewaysClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = VpnGatewaysClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VpnGatewaysClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = VpnGatewaysClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = VpnGatewaysClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = VpnGatewaysClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = VpnGatewaysClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = VpnGatewaysClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = VpnGatewaysClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = VpnGatewaysClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = VpnGatewaysClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VpnGatewaysClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = VpnGatewaysClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = VpnGatewaysClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = VpnGatewaysClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.VpnGatewaysTransport, '_prep_wrapped_messages') as prep: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.VpnGatewaysTransport, '_prep_wrapped_messages') as prep: + transport_class = VpnGatewaysClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (VpnGatewaysClient, transports.VpnGatewaysRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_vpn_tunnels.py b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_vpn_tunnels.py new file mode 100644 index 000000000..0398539d7 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/compute_v1/test_vpn_tunnels.py @@ -0,0 +1,3418 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+# try/except added for compatibility with python < 3.8, where unittest.mock has no AsyncMock (falls back to the third-party `mock` backport)
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format  # NOTE(review): duplicate of the import above — harmless; left in place to keep the patch hunk size valid
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import extended_operation  # type: ignore
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.compute_v1.services.vpn_tunnels import VpnTunnelsClient
+from google.cloud.compute_v1.services.vpn_tunnels import pagers
+from google.cloud.compute_v1.services.vpn_tunnels import transports
+from google.cloud.compute_v1.types import compute
+from google.oauth2 import service_account
+import google.auth
+
+
+def client_cert_source_callback():  # dummy mTLS cert provider used by the client-cert test fixtures; returns static (cert, key) byte pairs
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert VpnTunnelsClient._get_default_mtls_endpoint(None) is None + assert VpnTunnelsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert VpnTunnelsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert VpnTunnelsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert VpnTunnelsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert VpnTunnelsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (VpnTunnelsClient, "rest"), +]) +def test_vpn_tunnels_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.VpnTunnelsRestTransport, "rest"), +]) +def test_vpn_tunnels_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as 
use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (VpnTunnelsClient, "rest"), +]) +def test_vpn_tunnels_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'compute.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://compute.googleapis.com' + ) + + +def test_vpn_tunnels_client_get_transport_class(): + transport = VpnTunnelsClient.get_transport_class() + available_transports = [ + transports.VpnTunnelsRestTransport, + ] + assert transport in available_transports + + transport = VpnTunnelsClient.get_transport_class("rest") + assert transport == transports.VpnTunnelsRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (VpnTunnelsClient, transports.VpnTunnelsRestTransport, "rest"), +]) +@mock.patch.object(VpnTunnelsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(VpnTunnelsClient)) +def 
test_vpn_tunnels_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(VpnTunnelsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(VpnTunnelsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (VpnTunnelsClient, transports.VpnTunnelsRestTransport, "rest", "true"), + (VpnTunnelsClient, transports.VpnTunnelsRestTransport, "rest", "false"), +]) +@mock.patch.object(VpnTunnelsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(VpnTunnelsClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_vpn_tunnels_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + VpnTunnelsClient +]) +@mock.patch.object(VpnTunnelsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(VpnTunnelsClient)) +def test_vpn_tunnels_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (VpnTunnelsClient, transports.VpnTunnelsRestTransport, "rest"), +]) +def test_vpn_tunnels_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (VpnTunnelsClient, transports.VpnTunnelsRestTransport, "rest", None), +]) +def test_vpn_tunnels_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.AggregatedListVpnTunnelsRequest, + dict, +]) +def test_aggregated_list_rest(request_type): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.VpnTunnelAggregatedList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + unreachables=['unreachables_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnTunnelAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + assert response.unreachables == ['unreachables_value'] + + +def test_aggregated_list_rest_required_fields(request_type=compute.AggregatedListVpnTunnelsRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "include_all_scopes", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.VpnTunnelAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.VpnTunnelAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "includeAllScopes", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "post_aggregated_list") as post, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_aggregated_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListVpnTunnelsRequest.pb(compute.AggregatedListVpnTunnelsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.VpnTunnelAggregatedList.to_json(compute.VpnTunnelAggregatedList()) + + request = compute.AggregatedListVpnTunnelsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VpnTunnelAggregatedList() + + client.aggregated_list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request(transport: str = 'rest', request_type=compute.AggregatedListVpnTunnelsRequest): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VpnTunnelAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnTunnelAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/aggregated/vpnTunnels" % client.transport._host, args[1]) + + +def test_aggregated_list_rest_flattened_error(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListVpnTunnelsRequest(), + project='project_value', + ) + + +def test_aggregated_list_rest_pager(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.VpnTunnelAggregatedList( + items={ + 'a':compute.VpnTunnelsScopedList(), + 'b':compute.VpnTunnelsScopedList(), + 'c':compute.VpnTunnelsScopedList(), + }, + next_page_token='abc', + ), + compute.VpnTunnelAggregatedList( + items={}, + next_page_token='def', + ), + compute.VpnTunnelAggregatedList( + items={ + 'g':compute.VpnTunnelsScopedList(), + }, + next_page_token='ghi', + ), + compute.VpnTunnelAggregatedList( + items={ + 'h':compute.VpnTunnelsScopedList(), + 'i':compute.VpnTunnelsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.VpnTunnelAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get('a'), compute.VpnTunnelsScopedList) + assert pager.get('h') is None + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, compute.VpnTunnelsScopedList) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), compute.VpnTunnelsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteVpnTunnelRequest, + dict, +]) +def test_delete_rest(request_type): + client = VpnTunnelsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_delete_rest_required_fields(request_type=compute.DeleteVpnTunnelRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["vpn_tunnel"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["vpnTunnel"] = 'vpn_tunnel_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "vpnTunnel" in jsonified_request + assert jsonified_request["vpnTunnel"] == 'vpn_tunnel_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "vpnTunnel", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteVpnTunnelRequest.pb(compute.DeleteVpnTunnelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteVpnTunnelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteVpnTunnelRequest): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_tunnel='vpn_tunnel_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteVpnTunnelRequest(), + project='project_value', + region='region_value', + vpn_tunnel='vpn_tunnel_value', + ) + + +def test_delete_rest_error(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteVpnTunnelRequest, + dict, +]) +def test_delete_unary_rest(request_type): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + 
req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_required_fields(request_type=compute.DeleteVpnTunnelRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["vpn_tunnel"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["vpnTunnel"] = 'vpn_tunnel_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "vpnTunnel" in jsonified_request + assert jsonified_request["vpnTunnel"] == 'vpn_tunnel_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "vpnTunnel", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteVpnTunnelRequest.pb(compute.DeleteVpnTunnelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteVpnTunnelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteVpnTunnelRequest): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_tunnel='vpn_tunnel_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}" % client.transport._host, args[1]) + + +def test_delete_unary_rest_flattened_error(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteVpnTunnelRequest(), + project='project_value', + region='region_value', + vpn_tunnel='vpn_tunnel_value', + ) + + +def test_delete_unary_rest_error(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetVpnTunnelRequest, + dict, +]) +def test_get_rest(request_type): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.VpnTunnel( + creation_timestamp='creation_timestamp_value', + description='description_value', + detailed_status='detailed_status_value', + id=205, + ike_version=1182, + kind='kind_value', + label_fingerprint='label_fingerprint_value', + local_traffic_selector=['local_traffic_selector_value'], + name='name_value', + peer_external_gateway='peer_external_gateway_value', + peer_external_gateway_interface=3279, + peer_gcp_gateway='peer_gcp_gateway_value', + peer_ip='peer_ip_value', + region='region_value', + remote_traffic_selector=['remote_traffic_selector_value'], + router='router_value', + self_link='self_link_value', + shared_secret='shared_secret_value', + shared_secret_hash='shared_secret_hash_value', + status='status_value', + target_vpn_gateway='target_vpn_gateway_value', + vpn_gateway='vpn_gateway_value', + vpn_gateway_interface=2229, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnTunnel.pb(return_value) + 
json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.VpnTunnel) + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.detailed_status == 'detailed_status_value' + assert response.id == 205 + assert response.ike_version == 1182 + assert response.kind == 'kind_value' + assert response.label_fingerprint == 'label_fingerprint_value' + assert response.local_traffic_selector == ['local_traffic_selector_value'] + assert response.name == 'name_value' + assert response.peer_external_gateway == 'peer_external_gateway_value' + assert response.peer_external_gateway_interface == 3279 + assert response.peer_gcp_gateway == 'peer_gcp_gateway_value' + assert response.peer_ip == 'peer_ip_value' + assert response.region == 'region_value' + assert response.remote_traffic_selector == ['remote_traffic_selector_value'] + assert response.router == 'router_value' + assert response.self_link == 'self_link_value' + assert response.shared_secret == 'shared_secret_value' + assert response.shared_secret_hash == 'shared_secret_hash_value' + assert response.status == 'status_value' + assert response.target_vpn_gateway == 'target_vpn_gateway_value' + assert response.vpn_gateway == 'vpn_gateway_value' + assert response.vpn_gateway_interface == 2229 + + +def test_get_rest_required_fields(request_type=compute.GetVpnTunnelRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["vpn_tunnel"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + 
including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["vpnTunnel"] = 'vpn_tunnel_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "vpnTunnel" in jsonified_request + assert jsonified_request["vpnTunnel"] == 'vpn_tunnel_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.VpnTunnel() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.VpnTunnel.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "region", "vpnTunnel", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetVpnTunnelRequest.pb(compute.GetVpnTunnelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
compute.VpnTunnel.to_json(compute.VpnTunnel()) + + request = compute.GetVpnTunnelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VpnTunnel() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetVpnTunnelRequest): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VpnTunnel() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'vpn_tunnel': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_tunnel='vpn_tunnel_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnTunnel.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetVpnTunnelRequest(), + project='project_value', + region='region_value', + vpn_tunnel='vpn_tunnel_value', + ) + + +def test_get_rest_error(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertVpnTunnelRequest, + dict, +]) +def test_insert_rest(request_type): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["vpn_tunnel_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'detailed_status': 'detailed_status_value', 'id': 205, 'ike_version': 1182, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'local_traffic_selector': ['local_traffic_selector_value1', 'local_traffic_selector_value2'], 'name': 'name_value', 'peer_external_gateway': 'peer_external_gateway_value', 'peer_external_gateway_interface': 3279, 'peer_gcp_gateway': 'peer_gcp_gateway_value', 'peer_ip': 'peer_ip_value', 'region': 'region_value', 'remote_traffic_selector': ['remote_traffic_selector_value1', 'remote_traffic_selector_value2'], 'router': 'router_value', 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'shared_secret_hash': 'shared_secret_hash_value', 'status': 'status_value', 'target_vpn_gateway': 'target_vpn_gateway_value', 'vpn_gateway': 'vpn_gateway_value', 'vpn_gateway_interface': 2229} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_insert_rest_required_fields(request_type=compute.InsertVpnTunnelRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] 
= 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "vpnTunnelResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "post_insert") as post, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertVpnTunnelRequest.pb(compute.InsertVpnTunnelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertVpnTunnelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request(transport: str = 'rest', request_type=compute.InsertVpnTunnelRequest): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["vpn_tunnel_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'detailed_status': 'detailed_status_value', 'id': 205, 'ike_version': 1182, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'local_traffic_selector': ['local_traffic_selector_value1', 'local_traffic_selector_value2'], 'name': 'name_value', 'peer_external_gateway': 'peer_external_gateway_value', 'peer_external_gateway_interface': 3279, 'peer_gcp_gateway': 'peer_gcp_gateway_value', 'peer_ip': 'peer_ip_value', 'region': 'region_value', 'remote_traffic_selector': ['remote_traffic_selector_value1', 'remote_traffic_selector_value2'], 'router': 'router_value', 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'shared_secret_hash': 'shared_secret_hash_value', 'status': 'status_value', 'target_vpn_gateway': 'target_vpn_gateway_value', 'vpn_gateway': 'vpn_gateway_value', 'vpn_gateway_interface': 2229} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_tunnel_resource=compute.VpnTunnel(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels" % client.transport._host, args[1]) + + +def test_insert_rest_flattened_error(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertVpnTunnelRequest(), + project='project_value', + region='region_value', + vpn_tunnel_resource=compute.VpnTunnel(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_rest_error(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.InsertVpnTunnelRequest, + dict, +]) +def test_insert_unary_rest(request_type): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["vpn_tunnel_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'detailed_status': 'detailed_status_value', 'id': 205, 'ike_version': 1182, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'local_traffic_selector': ['local_traffic_selector_value1', 'local_traffic_selector_value2'], 'name': 'name_value', 'peer_external_gateway': 'peer_external_gateway_value', 'peer_external_gateway_interface': 3279, 'peer_gcp_gateway': 'peer_gcp_gateway_value', 'peer_ip': 'peer_ip_value', 'region': 'region_value', 'remote_traffic_selector': ['remote_traffic_selector_value1', 'remote_traffic_selector_value2'], 'router': 'router_value', 'self_link': 'self_link_value', 'shared_secret': 'shared_secret_value', 'shared_secret_hash': 'shared_secret_hash_value', 'status': 'status_value', 'target_vpn_gateway': 'target_vpn_gateway_value', 'vpn_gateway': 'vpn_gateway_value', 'vpn_gateway_interface': 2229} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_required_fields(request_type=compute.InsertVpnTunnelRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "vpnTunnelResource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, 
"post_insert") as post, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_insert") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertVpnTunnelRequest.pb(compute.InsertVpnTunnelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertVpnTunnelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request(transport: str = 'rest', request_type=compute.InsertVpnTunnelRequest): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request_init["vpn_tunnel_resource"] = {'creation_timestamp': 'creation_timestamp_value', 'description': 'description_value', 'detailed_status': 'detailed_status_value', 'id': 205, 'ike_version': 1182, 'kind': 'kind_value', 'label_fingerprint': 'label_fingerprint_value', 'labels': {}, 'local_traffic_selector': ['local_traffic_selector_value1', 'local_traffic_selector_value2'], 'name': 'name_value', 'peer_external_gateway': 'peer_external_gateway_value', 'peer_external_gateway_interface': 3279, 'peer_gcp_gateway': 'peer_gcp_gateway_value', 'peer_ip': 'peer_ip_value', 'region': 'region_value', 'remote_traffic_selector': ['remote_traffic_selector_value1', 'remote_traffic_selector_value2'], 'router': 'router_value', 'self_link': 'self_link_value', 
'shared_secret': 'shared_secret_value', 'shared_secret_hash': 'shared_secret_hash_value', 'status': 'status_value', 'target_vpn_gateway': 'target_vpn_gateway_value', 'vpn_gateway': 'vpn_gateway_value', 'vpn_gateway_interface': 2229} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + vpn_tunnel_resource=compute.VpnTunnel(creation_timestamp='creation_timestamp_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels" % client.transport._host, args[1]) + + +def test_insert_unary_rest_flattened_error(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertVpnTunnelRequest(), + project='project_value', + region='region_value', + vpn_tunnel_resource=compute.VpnTunnel(creation_timestamp='creation_timestamp_value'), + ) + + +def test_insert_unary_rest_error(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListVpnTunnelsRequest, + dict, +]) +def test_list_rest(request_type): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VpnTunnelList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnTunnelList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListVpnTunnelsRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.VpnTunnelList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.VpnTunnelList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "region", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListVpnTunnelsRequest.pb(compute.ListVpnTunnelsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = compute.VpnTunnelList.to_json(compute.VpnTunnelList()) + + request = compute.ListVpnTunnelsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VpnTunnelList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListVpnTunnelsRequest): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VpnTunnelList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.VpnTunnelList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListVpnTunnelsRequest(), + project='project_value', + region='region_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.VpnTunnelList( + items=[ + compute.VpnTunnel(), + compute.VpnTunnel(), + compute.VpnTunnel(), + ], + next_page_token='abc', + ), + compute.VpnTunnelList( + items=[], + next_page_token='def', + ), + compute.VpnTunnelList( + items=[ + compute.VpnTunnel(), + ], + next_page_token='ghi', + ), + compute.VpnTunnelList( + items=[ + compute.VpnTunnel(), + compute.VpnTunnel(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.VpnTunnelList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'region': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.VpnTunnel) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsVpnTunnelRequest, + dict, +]) +def test_set_labels_rest(request_type): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_set_labels_rest_required_fields(request_type=compute.SetLabelsVpnTunnelRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 
'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "post_set_labels") as post, \ + mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_set_labels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetLabelsVpnTunnelRequest.pb(compute.SetLabelsVpnTunnelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value 
= Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.SetLabelsVpnTunnelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.set_labels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_labels_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsVpnTunnelRequest): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + +def test_set_labels_rest_flattened(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{resource}/setLabels" % client.transport._host, args[1]) + + +def test_set_labels_rest_flattened_error(transport: str = 'rest'): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsVpnTunnelRequest(), + project='project_value', + region='region_value', + resource='resource_value', + region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'), + ) + + +def test_set_labels_rest_error(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.SetLabelsVpnTunnelRequest, + dict, +]) +def test_set_labels_unary_rest(request_type): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'} + request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.set_labels_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_set_labels_unary_rest_required_fields(request_type=compute.SetLabelsVpnTunnelRequest): + transport_class = transports.VpnTunnelsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["region"] = 'region_value' + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "region" in jsonified_request + assert jsonified_request["region"] == 'region_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_labels_unary(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.VpnTunnelsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("project", "region", "regionSetLabelsRequestResource", "resource", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_unary_rest_interceptors(null_interceptor): + transport = transports.VpnTunnelsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(), + ) + client = VpnTunnelsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_set_labels_unary_rest_interceptors(null_interceptor):
    """pre_set_labels/post_set_labels interceptors must each run exactly once."""
    transport = transports.VpnTunnelsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.VpnTunnelsRestInterceptor(),
    )
    client = VpnTunnelsClient(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.VpnTunnelsRestInterceptor, "post_set_labels") as post, \
         mock.patch.object(transports.VpnTunnelsRestInterceptor, "pre_set_labels") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = compute.SetLabelsVpnTunnelRequest.pb(compute.SetLabelsVpnTunnelRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = compute.Operation.to_json(compute.Operation())

        request = compute.SetLabelsVpnTunnelRequest()
        # NOTE(review): normalized `metadata =[` to PEP 8 spacing.
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()

        client.set_labels_unary(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_set_labels_unary_rest_bad_request(transport: str = 'rest', request_type=compute.SetLabelsVpnTunnelRequest):
    """An HTTP 400 response surfaces as core_exceptions.BadRequest."""
    client = VpnTunnelsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}
    request_init["region_set_labels_request_resource"] = {'label_fingerprint': 'label_fingerprint_value', 'labels': {}}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.set_labels_unary(request)


def test_set_labels_unary_rest_flattened():
    """Flattened arguments must transcode onto the documented setLabels URI."""
    client = VpnTunnelsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # get arguments that satisfy an http rule for this method
        sample_request = {'project': 'sample1', 'region': 'sample2', 'resource': 'sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            project='project_value',
            region='region_value',
            resource='resource_value',
            region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'),
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = compute.Operation.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.set_labels_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{resource}/setLabels" % client.transport._host, args[1])
def test_set_labels_unary_rest_flattened_error(transport: str = 'rest'):
    """Passing both a request object and flattened fields raises ValueError."""
    client = VpnTunnelsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.set_labels_unary(
            compute.SetLabelsVpnTunnelRequest(),
            project='project_value',
            region='region_value',
            resource='resource_value',
            region_set_labels_request_resource=compute.RegionSetLabelsRequest(label_fingerprint='label_fingerprint_value'),
        )


def test_set_labels_unary_rest_error():
    """Smoke test: a REST client can be constructed without error."""
    client = VpnTunnelsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


def test_credentials_transport_error():
    """Mutually exclusive construction arguments must raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.VpnTunnelsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VpnTunnelsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.VpnTunnelsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VpnTunnelsClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.VpnTunnelsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = VpnTunnelsClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = VpnTunnelsClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.VpnTunnelsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VpnTunnelsClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )


def test_transport_instance():
    """A client accepts and exposes a pre-built transport instance."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.VpnTunnelsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = VpnTunnelsClient(transport=transport)
    assert client.transport is transport
@pytest.mark.parametrize("transport_class", [
    transports.VpnTunnelsRestTransport,
])
def test_transport_adc(transport_class):
    """ADC is consulted when a transport is constructed without credentials."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """transport.kind must echo the transport name it was created from."""
    transport = VpnTunnelsClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_vpn_tunnels_base_transport_error():
    """credentials and credentials_file together must raise."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.VpnTunnelsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )


def test_vpn_tunnels_base_transport():
    """Every abstract method/property on the base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.compute_v1.services.vpn_tunnels.transports.VpnTunnelsTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.VpnTunnelsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'aggregated_list',
        'delete',
        'get',
        'insert',
        'list',
        'set_labels',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()


def test_vpn_tunnels_base_transport_with_credentials_file():
    """A credentials file is loaded with the service's default scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.vpn_tunnels.transports.VpnTunnelsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.VpnTunnelsTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/compute',
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id="octopus",
        )


def test_vpn_tunnels_base_transport_with_adc():
    """ADC is used when neither credentials nor credentials_file is given."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.vpn_tunnels.transports.VpnTunnelsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.VpnTunnelsTransport()
        adc.assert_called_once()
def test_vpn_tunnels_auth_adc():
    """Client construction without credentials resolves ADC with default scopes."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        VpnTunnelsClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/compute',
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id=None,
        )


def test_vpn_tunnels_http_transport_client_cert_source_for_mtls():
    """A client cert source is wired through to configure_mtls_channel."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        # NOTE(review): removed the stray space the generator emitted between
        # the class name and the call parentheses (flake8 E211).
        transports.VpnTunnelsRestTransport(
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_vpn_tunnels_host_no_port(transport_name):
    """An endpoint without a port resolves to the https REST host."""
    client = VpnTunnelsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_vpn_tunnels_host_with_port(transport_name):
    """An explicit port in the endpoint is preserved in the REST host."""
    client = VpnTunnelsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
        transport=transport_name,
    )
    assert client.transport._host == (
        'compute.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com:8000'
    )

@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_vpn_tunnels_client_transport_session_collision(transport_name):
    """Two clients must never share an HTTP session, per RPC method."""
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = VpnTunnelsClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = VpnTunnelsClient(
        credentials=creds2,
        transport=transport_name,
    )
    session1 = client1.transport.aggregated_list._session
    session2 = client2.transport.aggregated_list._session
    assert session1 != session2
    session1 = client1.transport.delete._session
    session2 = client2.transport.delete._session
    assert session1 != session2
    session1 = client1.transport.get._session
    session2 = client2.transport.get._session
    assert session1 != session2
    session1 = client1.transport.insert._session
    session2 = client2.transport.insert._session
    assert session1 != session2
    session1 = client1.transport.list._session
    session2 = client2.transport.list._session
    assert session1 != session2
    session1 = client1.transport.set_labels._session
    session2 = client2.transport.set_labels._session
    assert session1 != session2

def test_common_billing_account_path():
    """common_billing_account_path formats the billingAccounts/... resource name."""
    billing_account = "squid"
    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
    actual = VpnTunnelsClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts common_billing_account_path."""
    expected = {
        "billing_account": "clam",
    }
    path = VpnTunnelsClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = VpnTunnelsClient.parse_common_billing_account_path(path)
    assert expected == actual

def test_common_folder_path():
    """common_folder_path formats the folders/... resource name."""
    folder = "whelk"
    expected = "folders/{folder}".format(folder=folder, )
    actual = VpnTunnelsClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    """parse_common_folder_path inverts common_folder_path."""
    expected = {
        "folder": "octopus",
    }
    path = VpnTunnelsClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = VpnTunnelsClient.parse_common_folder_path(path)
    assert expected == actual
def test_common_organization_path():
    """common_organization_path formats the organizations/... resource name."""
    organization = "oyster"
    expected = "organizations/{organization}".format(organization=organization, )
    actual = VpnTunnelsClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    """parse_common_organization_path inverts common_organization_path."""
    expected = {
        "organization": "nudibranch",
    }
    path = VpnTunnelsClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = VpnTunnelsClient.parse_common_organization_path(path)
    assert expected == actual

def test_common_project_path():
    """common_project_path formats the projects/... resource name."""
    project = "cuttlefish"
    expected = "projects/{project}".format(project=project, )
    actual = VpnTunnelsClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    """parse_common_project_path inverts common_project_path."""
    expected = {
        "project": "mussel",
    }
    path = VpnTunnelsClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = VpnTunnelsClient.parse_common_project_path(path)
    assert expected == actual

def test_common_location_path():
    """common_location_path formats the projects/.../locations/... resource name."""
    project = "winkle"
    location = "nautilus"
    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
    actual = VpnTunnelsClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    """parse_common_location_path inverts common_location_path."""
    expected = {
        "project": "scallop",
        "location": "abalone",
    }
    path = VpnTunnelsClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = VpnTunnelsClient.parse_common_location_path(path)
    assert expected == actual


def test_client_with_default_client_info():
    """client_info supplied at construction reaches _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(transports.VpnTunnelsTransport, '_prep_wrapped_messages') as prep:
        client = VpnTunnelsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(transports.VpnTunnelsTransport, '_prep_wrapped_messages') as prep:
        transport_class = VpnTunnelsClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    """Closing a client closes the underlying transport session exactly once."""
    # Maps each transport name to the attribute that holds its session.
    transports = {
        "rest": "_session",
    }

    for transport, close_name in transports.items():
        client = VpnTunnelsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """Using the client as a context manager closes the transport on exit."""
    # Local name deliberately shadows the module-level `transports` import,
    # matching the generated style for this test only.
    transports = [
        'rest',
    ]
    for transport in transports:
        client = VpnTunnelsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()

@pytest.mark.parametrize("client_class,transport_class", [
    (VpnTunnelsClient, transports.VpnTunnelsRestTransport),
])
def test_api_key_credentials(client_class, transport_class):
    """An api_key in client options is exchanged for API-key credentials."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
from collections.abc import Iterable
from google.protobuf import json_format
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
# NOTE(review): the generator emitted `from google.protobuf import json_format`
# a second time here; the duplicate was removed (already imported above).

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.zone_operations import ZoneOperationsClient
from google.cloud.compute_v1.services.zone_operations import pagers
from google.cloud.compute_v1.services.zone_operations import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth


def client_cert_source_callback():
    """Return a dummy (cert bytes, key bytes) pair for client-cert tests."""
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a non-localhost stand-in for the client's default endpoint."""
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps *.googleapis.com hosts to their mtls variants."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert ZoneOperationsClient._get_default_mtls_endpoint(None) is None
    assert ZoneOperationsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert ZoneOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert ZoneOperationsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert ZoneOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert ZoneOperationsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class,transport_name", [
    (ZoneOperationsClient, "rest"),
])
def test_zone_operations_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info builds a client carrying the factory's credentials."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.ZoneOperationsRestTransport, "rest"),
])
def test_zone_operations_client_service_account_always_use_jwt(transport_class, transport_name):
    """always_use_jwt_access toggles with_always_use_jwt_access on SA credentials."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (ZoneOperationsClient, "rest"),
])
def test_zone_operations_client_from_service_account_file(client_class, transport_name):
    """from_service_account_file/json build clients carrying the file's credentials."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else
            'https://compute.googleapis.com'
        )


def test_zone_operations_client_get_transport_class():
    """get_transport_class returns the REST transport by default and by name."""
    transport = ZoneOperationsClient.get_transport_class()
    available_transports = [
        transports.ZoneOperationsRestTransport,
    ]
    assert transport in available_transports

    transport = ZoneOperationsClient.get_transport_class("rest")
    assert transport == transports.ZoneOperationsRestTransport
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (ZoneOperationsClient, transports.ZoneOperationsRestTransport, "rest"),
])
@mock.patch.object(ZoneOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ZoneOperationsClient))
def test_zone_operations_client_client_options(client_class, transport_class, transport_name):
    """Client options (endpoint, mTLS env vars, quota project, audience) reach the transport."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(ZoneOperationsClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(ZoneOperationsClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_endpoint is provided
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (ZoneOperationsClient, transports.ZoneOperationsRestTransport, "rest", "true"),
    (ZoneOperationsClient, transports.ZoneOperationsRestTransport, "rest", "false"),
])
@mock.patch.object(ZoneOperationsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ZoneOperationsClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_zone_operations_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """Endpoint auto-switches to the mTLS endpoint only when a client cert is usable."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # NOTE(review): `client` here is the instance left over from
                    # the previous case; only its DEFAULT_* class attributes
                    # are read, so this is safe but worth knowing.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ZoneOperationsClient, transports.ZoneOperationsRestTransport, "rest"), +]) +def test_zone_operations_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ZoneOperationsClient, transports.ZoneOperationsRestTransport, "rest", None), +]) +def test_zone_operations_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.DeleteZoneOperationRequest, + dict, +]) +def test_delete_rest(request_type): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DeleteZoneOperationResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DeleteZoneOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.DeleteZoneOperationResponse) + + +def test_delete_rest_required_fields(request_type=compute.DeleteZoneOperationRequest): + transport_class = transports.ZoneOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 
'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.DeleteZoneOperationResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.DeleteZoneOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.ZoneOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("operation", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_delete_rest_interceptors(null_interceptor): + transport = transports.ZoneOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ZoneOperationsRestInterceptor(), + ) + client = ZoneOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ZoneOperationsRestInterceptor, "post_delete") as post, \ + mock.patch.object(transports.ZoneOperationsRestInterceptor, "pre_delete") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteZoneOperationRequest.pb(compute.DeleteZoneOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.DeleteZoneOperationResponse.to_json(compute.DeleteZoneOperationResponse()) + + request = compute.DeleteZoneOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.DeleteZoneOperationResponse() + + client.delete(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request(transport: str = 'rest', request_type=compute.DeleteZoneOperationRequest): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.DeleteZoneOperationResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'operation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.DeleteZoneOperationResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/operations/{operation}" % client.transport._host, args[1]) + + +def test_delete_rest_flattened_error(transport: str = 'rest'): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete( + compute.DeleteZoneOperationRequest(), + project='project_value', + zone='zone_value', + operation='operation_value', + ) + + +def test_delete_rest_error(): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetZoneOperationRequest, + dict, +]) +def test_get_rest(request_type): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_get_rest_required_fields(request_type=compute.GetZoneOperationRequest): + transport_class = transports.ZoneOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ZoneOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("operation", "project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ZoneOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ZoneOperationsRestInterceptor(), + ) + client = ZoneOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ZoneOperationsRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.ZoneOperationsRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetZoneOperationRequest.pb(compute.GetZoneOperationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.GetZoneOperationRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetZoneOperationRequest): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2', 'operation': 'sample3'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + operation='operation_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/operations/{operation}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetZoneOperationRequest(), + project='project_value', + zone='zone_value', + operation='operation_value', + ) + + +def test_get_rest_error(): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListZoneOperationsRequest, + dict, +]) +def test_list_rest(request_type): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.OperationList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListZoneOperationsRequest): + transport_class = transports.ZoneOperationsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.OperationList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.ZoneOperationsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.ZoneOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ZoneOperationsRestInterceptor(), + ) + client = ZoneOperationsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(transports.ZoneOperationsRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.ZoneOperationsRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListZoneOperationsRequest.pb(compute.ListZoneOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.OperationList.to_json(compute.OperationList()) + + request = compute.ListZoneOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.OperationList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListZoneOperationsRequest): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.OperationList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.OperationList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}/operations" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListZoneOperationsRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.OperationList( + items=[ + compute.Operation(), + compute.Operation(), + compute.Operation(), + ], + next_page_token='abc', + ), + compute.OperationList( + items=[], + next_page_token='def', + ), + compute.OperationList( + items=[ + compute.Operation(), + ], + next_page_token='ghi', + ), + compute.OperationList( + items=[ + compute.Operation(), + compute.Operation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.OperationList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Operation) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + compute.WaitZoneOperationRequest, + dict, +]) +def test_wait_rest(request_type): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2', 'operation': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id='client_operation_id_value', + creation_timestamp='creation_timestamp_value', + description='description_value', + end_time='end_time_value', + http_error_message='http_error_message_value', + http_error_status_code=2374, + id=205, + insert_time='insert_time_value', + kind='kind_value', + name='name_value', + operation_group_id='operation_group_id_value', + operation_type='operation_type_value', + progress=885, + region='region_value', + self_link='self_link_value', + start_time='start_time_value', + status=compute.Operation.Status.DONE, + status_message='status_message_value', + target_id=947, + target_link='target_link_value', + user='user_value', + zone='zone_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.wait(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + assert response.client_operation_id == 'client_operation_id_value' + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.end_time == 'end_time_value' + assert response.http_error_message == 'http_error_message_value' + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == 'insert_time_value' + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.operation_group_id == 'operation_group_id_value' + assert response.operation_type == 'operation_type_value' + assert response.progress == 885 + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.start_time == 'start_time_value' + assert response.status == compute.Operation.Status.DONE + assert response.status_message == 'status_message_value' + assert response.target_id == 947 + assert response.target_link == 'target_link_value' + assert response.user == 'user_value' + assert response.zone == 'zone_value' + + +def test_wait_rest_required_fields(request_type=compute.WaitZoneOperationRequest): + transport_class = transports.ZoneOperationsRestTransport + + request_init = {} + request_init["operation"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).wait._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["operation"] = 'operation_value' + 
jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).wait._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "operation" in jsonified_request + assert jsonified_request["operation"] == 'operation_value' + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
def test_wait_rest_unset_required_fields():
    """Verify which request fields the ``wait`` method treats as required.

    ``_get_unset_required_fields({})`` reports every required field still
    missing from the given query params; intersecting with the (empty)
    default-params set yields the fields that must arrive via the URI.
    """
    # FIX: instantiate the credentials — the original passed the
    # AnonymousCredentials *class* (missing call parentheses), unlike every
    # other transport construction in this file.
    transport = transports.ZoneOperationsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    unset_fields = transport.wait._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("operation", "project", "zone", )))
def test_wait_rest_bad_request(transport: str = 'rest', request_type=compute.WaitZoneOperationRequest):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = ZoneOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # A request that satisfies the URI transcoding rules for this method.
    request = request_type(
        **{'project': 'sample1', 'zone': 'sample2', 'operation': 'sample3'}
    )

    # Fake a 400 at the session layer and confirm the client translates it
    # into the expected google.api_core exception.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.wait(request)
def test_wait_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    zone_ops = ZoneOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both a populated request and individual field values is
    # ambiguous; the client rejects the call before any HTTP traffic.
    with pytest.raises(ValueError):
        zone_ops.wait(
            compute.WaitZoneOperationRequest(),
            project='project_value',
            zone='zone_value',
            operation='operation_value',
        )


def test_wait_rest_error():
    """Smoke test: a REST-transport client can be constructed without raising."""
    ZoneOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
+ transport = transports.ZoneOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ZoneOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZoneOperationsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ZoneOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ZoneOperationsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ZoneOperationsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ZoneOperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZoneOperationsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
@pytest.mark.parametrize("transport_class", [
    transports.ZoneOperationsRestTransport,
])
def test_transport_adc(transport_class):
    """Without explicit credentials the transport falls back to ADC."""
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_transport_kind(transport_name):
    """``transport.kind`` mirrors the name used to look the transport up."""
    transport = ZoneOperationsClient.get_transport_class(transport_name)(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert transport.kind == transport_name


def test_zone_operations_base_transport_error():
    """Supplying both a credentials object and credentials_file is rejected."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.ZoneOperationsTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_zone_operations_base_transport_with_credentials_file():
    """A credentials file is loaded with the compute default scopes."""
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, \
            mock.patch('google.cloud.compute_v1.services.zone_operations.transports.ZoneOperationsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.ZoneOperationsTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        # The file path, default scopes, and quota project must all be
        # forwarded verbatim to google.auth.
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                'https://www.googleapis.com/auth/compute',
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            quota_project_id="octopus",
        )


def test_zone_operations_base_transport_with_adc():
    """With neither credentials nor a file, ADC supplies the credentials."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, \
            mock.patch('google.cloud.compute_v1.services.zone_operations.transports.ZoneOperationsTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.ZoneOperationsTransport()
        adc.assert_called_once()
def test_zone_operations_http_transport_client_cert_source_for_mtls():
    """A client-cert callback is wired through to the mTLS channel config."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.ZoneOperationsRestTransport(
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback,
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_zone_operations_host_no_port(transport_name):
    """An endpoint without a port resolves to the scheme-prefixed REST host."""
    client = ZoneOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
        transport=transport_name,
    )
    expected_host = (
        'compute.googleapis.com:443'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com'
    )
    assert client.transport._host == expected_host


@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_zone_operations_host_with_port(transport_name):
    """An explicit port in the endpoint is preserved on the transport host."""
    client = ZoneOperationsClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
        transport=transport_name,
    )
    expected_host = (
        'compute.googleapis.com:8000'
        if transport_name in ['grpc', 'grpc_asyncio']
        else 'https://compute.googleapis.com:8000'
    )
    assert client.transport._host == expected_host
def test_common_billing_account_path():
    """Path helper renders ``billingAccounts/{billing_account}``."""
    billing_account = "squid"
    rendered = ZoneOperationsClient.common_billing_account_path(billing_account)
    assert rendered == "billingAccounts/{billing_account}".format(billing_account=billing_account)


def test_parse_common_billing_account_path():
    """Parsing is the exact inverse of common_billing_account_path."""
    expected = {"billing_account": "clam"}
    path = ZoneOperationsClient.common_billing_account_path(**expected)
    assert ZoneOperationsClient.parse_common_billing_account_path(path) == expected


def test_common_folder_path():
    """Path helper renders ``folders/{folder}``."""
    folder = "whelk"
    rendered = ZoneOperationsClient.common_folder_path(folder)
    assert rendered == "folders/{folder}".format(folder=folder)
def test_common_organization_path():
    """Path helper renders ``organizations/{organization}``."""
    organization = "oyster"
    rendered = ZoneOperationsClient.common_organization_path(organization)
    assert rendered == "organizations/{organization}".format(organization=organization)


def test_parse_common_organization_path():
    """Parsing is the exact inverse of common_organization_path."""
    expected = {"organization": "nudibranch"}
    path = ZoneOperationsClient.common_organization_path(**expected)
    assert ZoneOperationsClient.parse_common_organization_path(path) == expected


def test_common_project_path():
    """Path helper renders ``projects/{project}``."""
    project = "cuttlefish"
    rendered = ZoneOperationsClient.common_project_path(project)
    assert rendered == "projects/{project}".format(project=project)


def test_parse_common_project_path():
    """Parsing is the exact inverse of common_project_path."""
    expected = {"project": "mussel"}
    path = ZoneOperationsClient.common_project_path(**expected)
    assert ZoneOperationsClient.parse_common_project_path(path) == expected


def test_common_location_path():
    """Path helper renders ``projects/{project}/locations/{location}``."""
    project = "winkle"
    location = "nautilus"
    rendered = ZoneOperationsClient.common_location_path(project, location)
    assert rendered == "projects/{project}/locations/{location}".format(project=project, location=location)
def test_client_with_default_client_info():
    """``client_info`` must reach ``_prep_wrapped_messages`` both when the
    client builds the transport and when the transport is built directly."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Path 1: transport constructed implicitly by the client.
    with mock.patch.object(transports.ZoneOperationsTransport, '_prep_wrapped_messages') as prep:
        ZoneOperationsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Path 2: transport class instantiated directly.
    with mock.patch.object(transports.ZoneOperationsTransport, '_prep_wrapped_messages') as prep:
        transport_class = ZoneOperationsClient.get_transport_class()
        transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_transport_close():
    """Closing the client must close the underlying transport resource.

    FIX: the local mapping was named ``transports``, shadowing the imported
    ``transports`` module inside this function; renamed for clarity.
    """
    close_names_by_transport = {
        "rest": "_session",
    }

    for transport_name, close_name in close_names_by_transport.items():
        client = ZoneOperationsClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name,
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
@pytest.mark.parametrize("client_class,transport_class", [
    (ZoneOperationsClient, transports.ZoneOperationsRestTransport),
])
def test_api_key_credentials(client_class, transport_class):
    # When client options carry an api_key, the client exchanges it for
    # API-key credentials via google.auth and hands those to the transport
    # in place of ADC.
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        # Stub the transport constructor so the exact keyword arguments the
        # client passes down can be inspected without building a real session.
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # Every other construction knob keeps its default; only the
            # credentials differ from the ADC path.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
def client_cert_source_callback():
    """Return a dummy (certificate, private key) PEM pair for mTLS hooks."""
    return (b"cert bytes", b"key bytes")
def modify_default_endpoint(client):
    """Return a substitute endpoint when the client's default is localhost.

    If the default endpoint is localhost the derived default mTLS endpoint
    would be identical, which defeats endpoint-switching tests; substitute a
    distinct hostname in that case, otherwise keep the client's default.
    """
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
@pytest.mark.parametrize("client_class,transport_name", [
    (ZonesClient, "rest"),
])
def test_zones_client_from_service_account_file(client_class, transport_name):
    """Both from_service_account_file and _json install the factory creds."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds

        # The _json constructor is a thin alias; both must yield a client of
        # the right type holding the factory-produced credentials.
        for constructor in (client_class.from_service_account_file,
                            client_class.from_service_account_json):
            client = constructor("dummy/file/path.json", transport=transport_name)
            assert client.transport._credentials == creds
            assert isinstance(client, client_class)

        expected_host = (
            'compute.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else 'https://compute.googleapis.com'
        )
        assert client.transport._host == expected_host


def test_zones_client_get_transport_class():
    """The REST transport is both discoverable and returned by name."""
    default_transport = ZonesClient.get_transport_class()
    assert default_transport in [transports.ZonesRestTransport]

    assert ZonesClient.get_transport_class("rest") == transports.ZonesRestTransport
+ with mock.patch.object(ZonesClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ZonesClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (ZonesClient, transports.ZonesRestTransport, "rest", "true"), + (ZonesClient, transports.ZonesRestTransport, "rest", "false"), +]) +@mock.patch.object(ZonesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ZonesClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_zones_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + ZonesClient +]) +@mock.patch.object(ZonesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ZonesClient)) +def test_zones_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ZonesClient, transports.ZonesRestTransport, "rest"), +]) +def test_zones_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ZonesClient, transports.ZonesRestTransport, "rest", None), +]) +def test_zones_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("request_type", [ + compute.GetZoneRequest, + dict, +]) +def test_get_rest(request_type): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.Zone( + available_cpu_platforms=['available_cpu_platforms_value'], + creation_timestamp='creation_timestamp_value', + description='description_value', + id=205, + kind='kind_value', + name='name_value', + region='region_value', + self_link='self_link_value', + status='status_value', + supports_pzs=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Zone.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Zone) + assert response.available_cpu_platforms == ['available_cpu_platforms_value'] + assert response.creation_timestamp == 'creation_timestamp_value' + assert response.description == 'description_value' + assert response.id == 205 + assert response.kind == 'kind_value' + assert response.name == 'name_value' + assert response.region == 'region_value' + assert response.self_link == 'self_link_value' + assert response.status == 'status_value' + assert response.supports_pzs is True + + +def test_get_rest_required_fields(request_type=compute.GetZoneRequest): + transport_class = transports.ZonesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + jsonified_request["zone"] = 'zone_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + assert "zone" in jsonified_request + assert jsonified_request["zone"] == 'zone_value' + + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Zone() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.Zone.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ZonesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("project", "zone", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ZonesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ZonesRestInterceptor(), + ) + client = ZonesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ZonesRestInterceptor, "post_get") as post, \ + mock.patch.object(transports.ZonesRestInterceptor, "pre_get") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetZoneRequest.pb(compute.GetZoneRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Zone.to_json(compute.Zone()) + + request = 
compute.GetZoneRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Zone() + + client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request(transport: str = 'rest', request_type=compute.GetZoneRequest): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1', 'zone': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Zone() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1', 'zone': 'sample2'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + zone='zone_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.Zone.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones/{zone}" % client.transport._host, args[1]) + + +def test_get_rest_flattened_error(transport: str = 'rest'): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetZoneRequest(), + project='project_value', + zone='zone_value', + ) + + +def test_get_rest_error(): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + compute.ListZonesRequest, + dict, +]) +def test_list_rest(request_type): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = compute.ZoneList( + id='id_value', + kind='kind_value', + next_page_token='next_page_token_value', + self_link='self_link_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ZoneList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == 'id_value' + assert response.kind == 'kind_value' + assert response.next_page_token == 'next_page_token_value' + assert response.self_link == 'self_link_value' + + +def test_list_rest_required_fields(request_type=compute.ListZonesRequest): + transport_class = transports.ZonesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = 'project_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "max_results", "order_by", "page_token", "return_partial_success", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == 'project_value' + + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.ZoneList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = compute.ZoneList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.ZonesRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess", )) & set(("project", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.ZonesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ZonesRestInterceptor(), + ) + client = ZonesClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ZonesRestInterceptor, "post_list") as post, \ + mock.patch.object(transports.ZonesRestInterceptor, "pre_list") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListZonesRequest.pb(compute.ListZonesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = compute.ZoneList.to_json(compute.ZoneList()) + + request = compute.ListZonesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.ZoneList() + + client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request(transport: str = 'rest', request_type=compute.ListZonesRequest): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project': 'sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.ZoneList() + + # get arguments that satisfy an http rule for this method + sample_request = {'project': 'sample1'} + + # get truthy value for each flattened field + mock_args = dict( + project='project_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = compute.ZoneList.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/compute/v1/projects/{project}/zones" % client.transport._host, args[1]) + + +def test_list_rest_flattened_error(transport: str = 'rest'): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListZonesRequest(), + project='project_value', + ) + + +def test_list_rest_pager(transport: str = 'rest'): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ZoneList( + items=[ + compute.Zone(), + compute.Zone(), + compute.Zone(), + ], + next_page_token='abc', + ), + compute.ZoneList( + items=[], + next_page_token='def', + ), + compute.ZoneList( + items=[ + compute.Zone(), + ], + next_page_token='ghi', + ), + compute.ZoneList( + items=[ + compute.Zone(), + compute.Zone(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ZoneList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project': 'sample1'} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Zone) + for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ZonesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ZonesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZonesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ZonesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ZonesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ZonesClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ZonesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZonesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ZonesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ZonesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.ZonesRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_transport_kind(transport_name): + transport = ZonesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_zones_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ZonesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_zones_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.compute_v1.services.zones.transports.ZonesTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.ZonesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+    methods = (
+        'get',
+        'list',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch-all: every remaining method/property on the abstract base transport must raise too.
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_zones_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.compute_v1.services.zones.transports.ZonesTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.ZonesTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+            'https://www.googleapis.com/auth/compute.readonly',
+            'https://www.googleapis.com/auth/compute',
+            'https://www.googleapis.com/auth/cloud-platform',
+),
+            quota_project_id="octopus",
+        )
+
+
+def test_zones_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.compute_v1.services.zones.transports.ZonesTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.ZonesTransport()
+        adc.assert_called_once()
+
+
+def test_zones_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        ZonesClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+            'https://www.googleapis.com/auth/compute.readonly',
+            'https://www.googleapis.com/auth/compute',
+            'https://www.googleapis.com/auth/cloud-platform',
+),
+            quota_project_id=None,
+        )
+
+
+def test_zones_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.ZonesRestTransport (
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_zones_host_no_port(transport_name):
+    client = ZonesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'compute.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://compute.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_zones_host_with_port(transport_name):
+    client = ZonesClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='compute.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'compute.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://compute.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_zones_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = ZonesClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = ZonesClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get._session
+    session2 = client2.transport.get._session
+    assert session1 != session2
+    session1 = client1.transport.list._session
+    session2 = client2.transport.list._session
+    assert session1 != session2
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = ZonesClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = ZonesClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = ZonesClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = ZonesClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = ZonesClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = ZonesClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "oyster"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = ZonesClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = ZonesClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = ZonesClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "cuttlefish"
+    expected = "projects/{project}".format(project=project, )
+    actual = ZonesClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = ZonesClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = ZonesClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = ZonesClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = ZonesClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = ZonesClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.ZonesTransport, '_prep_wrapped_messages') as prep:
+        client = ZonesClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.ZonesTransport, '_prep_wrapped_messages') as prep:
+        transport_class = ZonesClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+    }
+
+    for transport, close_name in transports.items():
+        client = ZonesClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+def test_client_ctx():
+    transports = [
+        'rest',
+    ]
+    for transport in transports:
+        client = ZonesClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Exiting the client context manager must close the underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+@pytest.mark.parametrize("client_class,transport_class", [
+    (ZonesClient, transports.ZonesRestTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )